fix: using custom S3 endpoint #362

Merged: 8 commits, Jun 6, 2024
588 changes: 356 additions & 232 deletions Cargo.lock

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions Cargo.toml
@@ -4,19 +4,19 @@ version = "0.11.0"
authors = ["Manuel Holtgrewe <manuel.holtgrewe@bih-charite.de>"]
description = "Rust-based worker for varfish-server"
license = "MIT"
homepage = "https://github.com/bihealth/varfish-server-worker"
repository = "https://github.com/bihealth/varfish-server-worker"
edition = "2021"
readme = "README.md"
rust-version = "1.64.0"
rust-version = "1.70.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
annonars = "0.36"
anyhow = "1.0"
async-compression = { version = "0.4", features = ["tokio", "gzip"] }
aws-sdk-s3 = "0.34"
aws-config = "0.56"
aws-sdk-s3 = { version = "1.33", features = ["behavior-version-latest"] }
aws-config = { version = "1.5", features = ["behavior-version-latest"] }
base16ct = "0.2"
bio = "1.4"
biocommons-bioutils = "0.1"
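For context on the dependency bump above: the 1.x releases of aws-sdk-s3 and aws-config require the caller to pick a "behavior version"; enabling the behavior-version-latest feature, as this Cargo.toml does, lets the config loader default to the latest one. Without that feature the version has to be passed explicitly, roughly as in this minimal sketch (illustrative only, not taken from the worker code):

use aws_config::BehaviorVersion;

#[tokio::main]
async fn main() {
    // Select a behavior version explicitly; the 1.x SDKs require this unless
    // the `behavior-version-latest` cargo feature supplies the default.
    let shared_config = aws_config::defaults(BehaviorVersion::latest())
        .load()
        .await;

    // Build an S3 client from the shared configuration.
    let _client = aws_sdk_s3::Client::new(&shared_config);
}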
48 changes: 39 additions & 9 deletions src/common/noodles.rs
@@ -101,19 +101,51 @@ pub async fn s3_open_read_maybe_gz<P>(path: P) -> Result<Pin<Box<dyn AsyncBufRea
where
P: AsRef<Path>,
{
// Get configuration from environment variables.
let access_key = if let Ok(access_key) = std::env::var("AWS_ACCESS_KEY_ID") {
access_key
} else {
anyhow::bail!("could not access key from env AWS_ACCESS_KEY_ID")
};
let secret_key = if let Ok(secret_key) = std::env::var("AWS_SECRET_ACCESS_KEY") {
secret_key
} else {
anyhow::bail!("could not get secret key from env AWS_SECRET_ACCESS_KEY")
};
let endpoint_url = if let Ok(endpoint_url) = std::env::var("AWS_ENDPOINT_URL") {
endpoint_url
} else {
anyhow::bail!("could not get endpoint url from env AWS_ENDPOINT_URL")
};
let region = if let Ok(region) = std::env::var("AWS_REGION") {
region
} else {
anyhow::bail!("could not AWS region from env AWS_REGION")
};

let cred =
aws_sdk_s3::config::Credentials::new(access_key, secret_key, None, None, "loaded-from-env");
let s3_config = aws_sdk_s3::config::Builder::new()
.endpoint_url(&endpoint_url)
.credentials_provider(cred)
.region(aws_config::Region::new(region))
.force_path_style(true) // use path-style addressing (bucket in the path, not the subdomain)
.build();

// Split bucket and path from input path.
let path_string = format!("{}", path.as_ref().display());
let (bucket, key) = if let Some((bucket, key)) = path_string.split_once('/') {
(bucket.to_string(), key.to_string())
} else {
anyhow::bail!("invalid S3 path: {}", path.as_ref().display());
};

let config = aws_config::load_from_env().await;
let client = aws_sdk_s3::Client::new(&config);
// Setup S3 client and access object.
let client = aws_sdk_s3::Client::from_conf(s3_config);
let object = client.get_object().bucket(&bucket).key(&key).send().await?;

let path_is_gzip = is_gz(path.as_ref());
tracing::trace!(
tracing::debug!(
"Opening S3 object {} as {} for reading (async)",
path.as_ref().display(),
if path_is_gzip {
@@ -155,21 +187,19 @@ pub async fn open_vcf_readers(paths: &[String]) -> Result<Vec<AsyncVcfReader>, a
/// The behaviour is as follows:
///
/// - If `path_in` is "-" then open stdin and read as plain text.
/// - If environment variable `AWS_PROFILE` is set to "varfish-s3" then enable S3 mode.
/// - If environment variable `AWS_ACCESS_KEY_ID` is set then enable S3 mode.
/// - If `path_in` is absolute or S3 mode is disabled then open `path_in` as local file
/// - Otherwise, attempt to open `path_in` as S3 object.
pub async fn open_vcf_reader(path_in: &str) -> Result<AsyncVcfReader, anyhow::Error> {
let s3_mode = match std::env::var("AWS_PROFILE") {
Ok(s) => s == "varfish-s3",
_ => false,
};
if s3_mode && !path_in.starts_with('/') {
if super::s3::s3_mode() && path_in != "-" && !path_in.starts_with('/') {
tracing::debug!("Opening S3 object {} for reading (async)", path_in);
Ok(vcf::AsyncReader::new(
s3_open_read_maybe_gz(path_in)
.await
.map_err(|e| anyhow::anyhow!("could not build VCF reader from S3 file: {}", e))?,
))
} else {
tracing::debug!("Opening local file {} for reading (async)", path_in);
Ok(vcf::AsyncReader::new(
open_read_maybe_gz(path_in).await.map_err(|e| {
anyhow::anyhow!("could not build VCF reader from local file: {}", e)
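A hedged usage sketch (not part of the PR) of how the custom endpoint behaves once the four environment variables read above are exported, for example against a MinIO server; the bucket, key, and credential values are made up, and the function is assumed to sit where open_vcf_reader is in scope (src/common/noodles.rs):

// Assumed environment (illustrative values only):
//   AWS_ACCESS_KEY_ID=minio-user
//   AWS_SECRET_ACCESS_KEY=minio-secret
//   AWS_ENDPOINT_URL=http://127.0.0.1:9000
//   AWS_REGION=us-east-1
async fn open_inputs() -> Result<(), anyhow::Error> {
    // Relative path: interpreted as "<bucket>/<key>" and fetched via the
    // configured endpoint (S3 mode is on because AWS_ACCESS_KEY_ID is set).
    let _from_s3 = open_vcf_reader("varfish-import/case-1/sample.vcf.gz").await?;

    // Absolute path (or "-"): opened as a local file / stdin even in S3 mode.
    let _from_disk = open_vcf_reader("/data/case-1/sample.vcf.gz").await?;
    Ok(())
}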
4 changes: 3 additions & 1 deletion src/common/s3.rs
@@ -4,7 +4,9 @@ use mehari::common::io::std::is_gz;

/// Helper that returns whether S3 mode has been enabled via `AWS_ACCESS_KEY_ID`.
pub fn s3_mode() -> bool {
std::env::var("AWS_ACCESS_KEY_ID").is_ok()
let result = std::env::var("AWS_ACCESS_KEY_ID").is_ok();
tracing::trace!("S3 mode is {}", if result { "enabled" } else { "disabled" });
result
}

/// Return the S3 configuration from environment variables.
5 changes: 3 additions & 2 deletions src/seqvars/ingest/mod.rs
@@ -2,14 +2,15 @@

use std::sync::{Arc, OnceLock};

use crate::common::noodles::open_vcf_reader;
use crate::{
common::{self, worker_version, GenomeRelease},
flush_and_shutdown,
};
use futures::TryStreamExt;
use mehari::{
annotate::seqvars::provider::Provider as MehariProvider,
common::noodles::{open_vcf_reader, open_vcf_writer, AsyncVcfReader, AsyncVcfWriter},
common::noodles::{open_vcf_writer, AsyncVcfReader, AsyncVcfWriter},
};
use noodles_vcf as vcf;
use thousands::Separable;
@@ -427,7 +428,7 @@ async fn process_variants(
&mut output_record,
)?;
} else {
tracing::trace!(
tracing::debug!(
"Record @{:?} on non-canonical chromosome, skipping.",
&vcf_var
);
61 changes: 27 additions & 34 deletions src/seqvars/prefilter/mod.rs
@@ -2,13 +2,11 @@

use std::io::BufRead;

use crate::common::noodles::open_vcf_reader;
use futures::TryStreamExt;
use mehari::{
annotate::seqvars::ann::AnnField,
common::{
io::std::is_gz,
noodles::{open_vcf_reader, open_vcf_writer, AsyncVcfReader, AsyncVcfWriter},
},
common::noodles::{open_vcf_writer, AsyncVcfReader, AsyncVcfWriter},
};
use noodles_vcf as vcf;
use thousands::Separable;
@@ -20,7 +18,7 @@
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
struct PrefilterParams {
/// Path to output file.
pub path_out: String,
pub prefilter_path: String,
/// Maximal allele population frequency.
pub max_freq: f64,
/// Maximal distance to exon.
@@ -216,9 +214,10 @@
{
tracing::info!("opening output files...");
let mut output_writers = Vec::new();
let mut out_path_helpers = Vec::new();
for params in params_list.iter() {
let header_params = PrefilterParams {
path_out: "<stripped>".into(),
prefilter_path: "<stripped>".into(),
..params.clone()
};
let mut header = header.clone();
@@ -232,9 +231,17 @@
),
)?;

let mut writer = open_vcf_writer(&params.path_out).await?;
out_path_helpers.push(crate::common::s3::OutputPathHelper::new(
&params.prefilter_path,
)?);
let mut writer =
open_vcf_writer(&out_path_helpers.last().expect("just pushed").path_out()).await?;

Check warning on line 238 in src/seqvars/prefilter/mod.rs (GitHub Actions / clippy):

warning: this expression creates a reference which is immediately dereferenced by the compiler
  --> src/seqvars/prefilter/mod.rs:238:33
    |
238 |             open_vcf_writer(&out_path_helpers.last().expect("just pushed").path_out()).await?;
    |                             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: change this to: `out_path_helpers.last().expect("just pushed").path_out()`
    |
    = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow
    = note: `#[warn(clippy::needless_borrow)]` on by default
writer.write_header(&header).await.map_err(|e| {
anyhow::anyhow!("could not write header to {}: {}", &params.path_out, e)
anyhow::anyhow!(
"could not write header to {}: {}",
&params.prefilter_path,
e
)
})?;
output_writers.push(writer);
}
@@ -248,28 +255,14 @@
for output_writer in output_writers.drain(..) {
flush_and_shutdown!(output_writer);
}
}

for params in params_list.iter() {
if is_gz(&params.path_out) {
tracing::info!("writing TBI index for {}...", &params.path_out);
crate::common::noodles::build_tbi(
&params.path_out,
&format!("{}.tbi", &params.path_out),
)
.await
.map_err(|e| anyhow::anyhow!("problem building TBI: {}", e))?;
tracing::info!("... done writing TBI index");
} else {
tracing::info!(
"(not building TBI index for plain text VCF file {}",
&params.path_out
);
for out_path_helper in out_path_helpers.drain(..) {
out_path_helper.create_tbi_for_bgzf().await?;
out_path_helper.upload_for_s3().await?;
}
}

tracing::info!(
"All of `seqvars ingest` completed in {:?}",
"All of `seqvars prefilter` completed in {:?}",
before_anything.elapsed()
);
Ok(())
@@ -285,7 +278,7 @@
path_in: "tests/seqvars/prefilter/ingest.vcf".into(),
params: vec![format!(
r#"{{
"path_out": "{}/out-1.vcf",
"prefilter_path": "{}/out-1.vcf",
"max_freq": 0.01,
"max_exon_dist": 200
}}"#,
@@ -301,7 +294,7 @@
))
.exists());

insta::assert_snapshot!(std::fs::read_to_string(&format!(
insta::assert_snapshot!(std::fs::read_to_string(format!(
"{}/out-1.vcf",
tmpdir.to_path_buf().to_str().unwrap()
))?);
@@ -314,7 +307,7 @@
let tmpdir = temp_testdir::TempDir::default();

let params_json = format!(
r#"{{"path_out": "{}/out-1.vcf", "max_freq": 0.01, "max_exon_dist": 200}}"#,
r#"{{"prefilter_path": "{}/out-1.vcf", "max_freq": 0.01, "max_exon_dist": 200}}"#,
tmpdir.to_path_buf().to_str().unwrap()
);

@@ -334,7 +327,7 @@
))
.exists());

insta::assert_snapshot!(std::fs::read_to_string(&format!(
insta::assert_snapshot!(std::fs::read_to_string(format!(
"{}/out-1.vcf",
tmpdir.to_path_buf().to_str().unwrap()
))?);
@@ -351,15 +344,15 @@
params: vec![
format!(
r#"{{
"path_out": "{}/out-1.vcf",
"prefilter_path": "{}/out-1.vcf",
"max_freq": 0.01,
"max_exon_dist": 200
}}"#,
tmpdir.to_path_buf().to_str().unwrap()
),
format!(
r#"{{
"path_out": "{}/out-2.vcf",
"prefilter_path": "{}/out-2.vcf",
"max_freq": 0,
"max_exon_dist": 20
}}"#,
@@ -381,11 +374,11 @@
))
.exists());

insta::assert_snapshot!(std::fs::read_to_string(&format!(
insta::assert_snapshot!(std::fs::read_to_string(format!(
"{}/out-1.vcf",
tmpdir.to_path_buf().to_str().unwrap()
))?);
insta::assert_snapshot!(std::fs::read_to_string(&format!(
insta::assert_snapshot!(std::fs::read_to_string(format!(
"{}/out-2.vcf",
tmpdir.to_path_buf().to_str().unwrap()
))?);
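The prefilter output now goes through crate::common::s3::OutputPathHelper, so an S3 destination can be staged locally, indexed, and then uploaded. Below is a hedged sketch of that call pattern; the method names (path_out, create_tbi_for_bgzf, upload_for_s3) come from the diff, but their exact signatures and the helper's internals are assumptions.

async fn write_prefilter_output(
    prefilter_path: &str,
    header: &vcf::Header,
) -> Result<(), anyhow::Error> {
    // Resolve the destination: an S3 path is (assumed to be) mapped to a local
    // staging path, while a plain local path passes through unchanged.
    let helper = crate::common::s3::OutputPathHelper::new(prefilter_path)?;

    let mut writer = open_vcf_writer(helper.path_out()).await?;
    writer.write_header(header).await?;
    // ... write the records, then flush and shut down the writer ...

    helper.create_tbi_for_bgzf().await?; // TBI index, only for bgzip output
    helper.upload_for_s3().await?; // push staged file(s); assumed no-op for local paths
    Ok(())
}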
@@ -1,6 +1,6 @@
---
source: src/seqvars/prefilter/mod.rs
expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
expression: "std::fs::read_to_string(format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
---
##fileformat=VCFv4.4
##INFO=<ID=gnomad_exomes_an,Number=1,Type=Integer,Description="Number of alleles in gnomAD exomes">
@@ -54,8 +54,7 @@ expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n
##PEDIGREE=<ID=Case_1_mother-N1-DNA1-WGS1>
##x-varfish-version=<ID=varfish-server-worker,Version="x.y.z">
##x-varfish-version=<ID=orig-caller,Name="GatkHaplotypeCaller",Version="3.7-0-gcfedb67">
##x-varfish-prefilter-params={"path_out":"<stripped>","max_freq":0.01,"max_exon_dist":200}
##x-varfish-prefilter-params={"prefilter_path":"<stripped>","max_freq":0.01,"max_exon_dist":200}
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Case_1_father-N1-DNA1-WGS1 Case_1_index-N1-DNA1-WGS1 Case_1_mother-N1-DNA1-WGS1
17 41210126 . C CTAGCACTT . . gnomad_exomes_an=31272;gnomad_exomes_hom=0;gnomad_exomes_het=85;gnomad_genomes_an=0;gnomad_genomes_hom=0;gnomad_genomes_het=0;ANN=CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007294.4|Coding|18/22|c.5194-975_5194-974insAAGTGCTA|p.?|5307/7088|5194/5592||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007297.4|Coding|17/21|c.5053-975_5053-974insAAGTGCTA|p.?|5247/7028|5053/5451||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007299.4|Coding|18/21|c.1882-975_1882-974insAAGTGCTA|p.?|1989/3696|1882/2100||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007300.4|Coding|19/23|c.5257-975_5257-974insAAGTGCTA|p.?|5370/7151|5257/5655||-974| GT:AD:DP:GQ 0/0:29:29:87 0/1:23:36:99 0/1:15:32:99
MT 750 . A G . . . GT:AD:DP:GQ 1/1:0:2757:99 1/1:0:2392:99 1/1:0:1621:99

@@ -1,6 +1,6 @@
---
source: src/seqvars/prefilter/mod.rs
expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
expression: "std::fs::read_to_string(format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
---
##fileformat=VCFv4.4
##INFO=<ID=gnomad_exomes_an,Number=1,Type=Integer,Description="Number of alleles in gnomAD exomes">
@@ -54,8 +54,7 @@ expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n
##PEDIGREE=<ID=Case_1_mother-N1-DNA1-WGS1>
##x-varfish-version=<ID=varfish-server-worker,Version="x.y.z">
##x-varfish-version=<ID=orig-caller,Name="GatkHaplotypeCaller",Version="3.7-0-gcfedb67">
##x-varfish-prefilter-params={"path_out":"<stripped>","max_freq":0.01,"max_exon_dist":200}
##x-varfish-prefilter-params={"prefilter_path":"<stripped>","max_freq":0.01,"max_exon_dist":200}
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Case_1_father-N1-DNA1-WGS1 Case_1_index-N1-DNA1-WGS1 Case_1_mother-N1-DNA1-WGS1
17 41210126 . C CTAGCACTT . . gnomad_exomes_an=31272;gnomad_exomes_hom=0;gnomad_exomes_het=85;gnomad_genomes_an=0;gnomad_genomes_hom=0;gnomad_genomes_het=0;ANN=CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007294.4|Coding|18/22|c.5194-975_5194-974insAAGTGCTA|p.?|5307/7088|5194/5592||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007297.4|Coding|17/21|c.5053-975_5053-974insAAGTGCTA|p.?|5247/7028|5053/5451||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007299.4|Coding|18/21|c.1882-975_1882-974insAAGTGCTA|p.?|1989/3696|1882/2100||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007300.4|Coding|19/23|c.5257-975_5257-974insAAGTGCTA|p.?|5370/7151|5257/5655||-974| GT:AD:DP:GQ 0/0:29:29:87 0/1:23:36:99 0/1:15:32:99
MT 750 . A G . . . GT:AD:DP:GQ 1/1:0:2757:99 1/1:0:2392:99 1/1:0:1621:99

@@ -1,6 +1,6 @@
---
source: src/seqvars/prefilter/mod.rs
expression: "std::fs::read_to_string(&format!(\"{}/out-2.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
expression: "std::fs::read_to_string(format!(\"{}/out-2.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
---
##fileformat=VCFv4.4
##INFO=<ID=gnomad_exomes_an,Number=1,Type=Integer,Description="Number of alleles in gnomAD exomes">
@@ -54,7 +54,6 @@ expression: "std::fs::read_to_string(&format!(\"{}/out-2.vcf\",\n
##PEDIGREE=<ID=Case_1_mother-N1-DNA1-WGS1>
##x-varfish-version=<ID=varfish-server-worker,Version="x.y.z">
##x-varfish-version=<ID=orig-caller,Name="GatkHaplotypeCaller",Version="3.7-0-gcfedb67">
##x-varfish-prefilter-params={"path_out":"<stripped>","max_freq":0.0,"max_exon_dist":20}
##x-varfish-prefilter-params={"prefilter_path":"<stripped>","max_freq":0.0,"max_exon_dist":20}
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Case_1_father-N1-DNA1-WGS1 Case_1_index-N1-DNA1-WGS1 Case_1_mother-N1-DNA1-WGS1
MT 750 . A G . . . GT:AD:DP:GQ 1/1:0:2757:99 1/1:0:2392:99 1/1:0:1621:99

@@ -1,6 +1,6 @@
---
source: src/seqvars/prefilter/mod.rs
expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
expression: "std::fs::read_to_string(format!(\"{}/out-1.vcf\",\n tmpdir.to_path_buf().to_str().unwrap()))?"
---
##fileformat=VCFv4.4
##INFO=<ID=gnomad_exomes_an,Number=1,Type=Integer,Description="Number of alleles in gnomAD exomes">
@@ -54,8 +54,7 @@ expression: "std::fs::read_to_string(&format!(\"{}/out-1.vcf\",\n
##PEDIGREE=<ID=Case_1_mother-N1-DNA1-WGS1>
##x-varfish-version=<ID=varfish-server-worker,Version="x.y.z">
##x-varfish-version=<ID=orig-caller,Name="GatkHaplotypeCaller",Version="3.7-0-gcfedb67">
##x-varfish-prefilter-params={"path_out":"<stripped>","max_freq":0.01,"max_exon_dist":200}
##x-varfish-prefilter-params={"prefilter_path":"<stripped>","max_freq":0.01,"max_exon_dist":200}
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Case_1_father-N1-DNA1-WGS1 Case_1_index-N1-DNA1-WGS1 Case_1_mother-N1-DNA1-WGS1
17 41210126 . C CTAGCACTT . . gnomad_exomes_an=31272;gnomad_exomes_hom=0;gnomad_exomes_het=85;gnomad_genomes_an=0;gnomad_genomes_hom=0;gnomad_genomes_het=0;ANN=CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007294.4|Coding|18/22|c.5194-975_5194-974insAAGTGCTA|p.?|5307/7088|5194/5592||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007297.4|Coding|17/21|c.5053-975_5053-974insAAGTGCTA|p.?|5247/7028|5053/5451||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007299.4|Coding|18/21|c.1882-975_1882-974insAAGTGCTA|p.?|1989/3696|1882/2100||-974|,CTAGCACTT|intron_variant|MODIFIER|BRCA1|HGNC:1100|transcript|NM_007300.4|Coding|19/23|c.5257-975_5257-974insAAGTGCTA|p.?|5370/7151|5257/5655||-974| GT:AD:DP:GQ 0/0:29:29:87 0/1:23:36:99 0/1:15:32:99
MT 750 . A G . . . GT:AD:DP:GQ 1/1:0:2757:99 1/1:0:2392:99 1/1:0:1621:99

4 changes: 2 additions & 2 deletions src/seqvars/query/mod.rs
@@ -97,11 +97,11 @@ fn passes_for_gene(
.iter()
.for_each(|(sample_name, genotype_choice)| match genotype_choice {
Some(GenotypeChoice::ComphetIndex) => {
index_name = sample_name.clone();
index_name.clone_from(sample_name);
mode = Mode::ComphetRecessive;
}
Some(GenotypeChoice::RecessiveIndex) => {
index_name = sample_name.clone();
index_name.clone_from(sample_name);
mode = Mode::Recessive;
}
Some(GenotypeChoice::RecessiveParent) => {
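The switch from index_name = sample_name.clone() to index_name.clone_from(sample_name) follows the idiom suggested by clippy's assigning_clones lint: clone_from can reuse the destination's existing allocation instead of building a new value and dropping the old one. A standalone illustration (not from the worker code):

fn main() {
    let sample_name = String::from("Case_1_index-N1-DNA1-WGS1");
    let mut index_name = String::with_capacity(64);
    // May reuse index_name's buffer instead of allocating a fresh String and
    // dropping the old one, as `index_name = sample_name.clone()` would.
    index_name.clone_from(&sample_name);
    assert_eq!(index_name, sample_name);
}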
3 changes: 2 additions & 1 deletion src/strucvars/ingest/mod.rs
@@ -1,11 +1,12 @@
//! Implementation of `strucvars ingest` subcommand.

use crate::common::noodles::open_vcf_readers;
use crate::common::{self, worker_version, GenomeRelease};
use crate::flush_and_shutdown;
use futures::future::join_all;
use mehari::annotate::strucvars::guess_sv_caller;
use mehari::common::io::std::is_gz;
use mehari::common::noodles::{open_vcf_readers, open_vcf_writer, AsyncVcfReader, AsyncVcfWriter};
use mehari::common::noodles::{open_vcf_writer, AsyncVcfReader, AsyncVcfWriter};
use noodles_vcf as vcf;
use rand_core::SeedableRng;
use tokio::io::AsyncWriteExt;