Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion examples/zonky/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use postgresql_embedded::{PostgreSQL, Result, Settings};
async fn main() -> Result<()> {
let settings = Settings {
releases_url: zonky::URL.to_string(),
version: VersionReq::parse("=16.2.0")?,
version: VersionReq::parse("=16.3.0")?,
..Default::default()
};
let mut postgresql = PostgreSQL::new(settings);
Expand Down
14 changes: 4 additions & 10 deletions postgresql_archive/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ version.workspace = true
[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
flate2 = { workspace = true, optional = true }
flate2 = { workspace = true }
hex = { workspace = true }
http = { workspace = true }
human_bytes = { workspace = true, default-features = false }
Expand All @@ -29,15 +29,15 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, optional = true }
sha1 = { workspace = true, optional = true }
sha2 = { workspace = true, optional = true }
tar = { workspace = true, optional = true }
tar = { workspace = true }
target-triple = { workspace = true, optional = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"], optional = true }
tracing = { workspace = true, features = ["log"] }
url = { workspace = true }
xz2 = { workspace = true, optional = true }
zip = { workspace = true, optional = true }
xz2 = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
criterion = { workspace = true }
Expand Down Expand Up @@ -66,17 +66,11 @@ rustls-tls = ["reqwest/rustls-tls-native-roots"]
sha1 = ["dep:sha1"]
sha2 = ["dep:sha2"]
theseus = [
"dep:flate2",
"dep:tar",
"dep:target-triple",
"github",
"sha2",
]
zonky = [
"dep:flate2",
"dep:tar",
"dep:xz2",
"dep:zip",
"maven",
]

Expand Down
5 changes: 4 additions & 1 deletion postgresql_archive/src/archive.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

use crate::error::Result;
use crate::{extractor, repository};
use regex::Regex;
use semver::{Version, VersionReq};
use std::path::{Path, PathBuf};
use tracing::instrument;
Expand Down Expand Up @@ -43,7 +44,9 @@ pub async fn get_archive(url: &str, version_req: &VersionReq) -> Result<(Version
#[instrument(skip(bytes))]
pub async fn extract(url: &str, bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
let extractor_fn = extractor::registry::get(url)?;
extractor_fn(bytes, out_dir)
let mut extract_directories = extractor::ExtractDirectories::default();
extract_directories.add_mapping(Regex::new(".*")?, out_dir.to_path_buf());
extractor_fn(bytes, extract_directories)
}

#[cfg(test)]
Expand Down
75 changes: 10 additions & 65 deletions postgresql_archive/src/configuration/theseus/extractor.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
use crate::extractor::{tar_gz_extract, ExtractDirectories};
use crate::Error::Unexpected;
use crate::Result;
use flate2::bufread::GzDecoder;
use human_bytes::human_bytes;
use num_format::{Locale, ToFormattedString};
use std::fs::{create_dir_all, remove_dir_all, remove_file, rename, File};
use std::io::{copy, BufReader, Cursor};
use regex::Regex;
use std::fs::{create_dir_all, remove_dir_all, remove_file, rename};
use std::path::{Path, PathBuf};
use std::thread::sleep;
use std::time::Duration;
use tar::Archive;
use tracing::{debug, instrument, warn};

/// Extracts the compressed tar `bytes` into the directories mapped by [ExtractDirectories].
Expand All @@ -17,18 +14,14 @@ use tracing::{debug, instrument, warn};
/// Returns an error if the extraction fails.
#[allow(clippy::cast_precision_loss)]
#[instrument(skip(bytes))]
pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
let mut files = Vec::new();
let input = BufReader::new(Cursor::new(bytes));
let decoder = GzDecoder::new(input);
let mut archive = Archive::new(decoder);
let mut extracted_bytes = 0;
pub fn extract(bytes: &Vec<u8>, extract_directories: ExtractDirectories) -> Result<Vec<PathBuf>> {
let out_dir = extract_directories.get_path(".")?;

let parent_dir = if let Some(parent) = out_dir.parent() {
parent
} else {
debug!("No parent directory for {}", out_dir.to_string_lossy());
out_dir
out_dir.as_path()
};

create_dir_all(parent_dir)?;
Expand All @@ -42,55 +35,14 @@ pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
out_dir.to_string_lossy()
);
remove_file(&lock_file)?;
return Ok(files);
return Ok(Vec::new());
}

let extract_dir = tempfile::tempdir_in(parent_dir)?.into_path();
debug!("Extracting archive to {}", extract_dir.to_string_lossy());

for archive_entry in archive.entries()? {
let mut entry = archive_entry?;
let entry_header = entry.header();
let entry_type = entry_header.entry_type();
let entry_size = entry_header.size()?;
#[cfg(unix)]
let file_mode = entry_header.mode()?;

let entry_header_path = entry_header.path()?.to_path_buf();
let prefix = match entry_header_path.components().next() {
Some(component) => component.as_os_str().to_str().unwrap_or_default(),
None => {
return Err(Unexpected(
"Failed to get file header path prefix".to_string(),
));
}
};
let stripped_entry_header_path = entry_header_path.strip_prefix(prefix)?.to_path_buf();
let mut entry_name = extract_dir.clone();
entry_name.push(stripped_entry_header_path);

if entry_type.is_dir() || entry_name.is_dir() {
create_dir_all(&entry_name)?;
} else if entry_type.is_file() {
let mut output_file = File::create(&entry_name)?;
copy(&mut entry, &mut output_file)?;
extracted_bytes += entry_size;

#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
output_file.set_permissions(std::fs::Permissions::from_mode(file_mode))?;
}
files.push(entry_name);
} else if entry_type.is_symlink() {
#[cfg(unix)]
if let Some(symlink_target) = entry.link_name()? {
let symlink_path = entry_name.clone();
std::os::unix::fs::symlink(symlink_target.as_ref(), symlink_path)?;
files.push(entry_name);
}
}
}
let mut archive_extract_directories = ExtractDirectories::default();
archive_extract_directories.add_mapping(Regex::new(".*")?, extract_dir.clone());
let files = tar_gz_extract(bytes, archive_extract_directories)?;

if out_dir.exists() {
debug!(
Expand All @@ -113,13 +65,6 @@ pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
remove_file(lock_file)?;
}

let number_of_files = files.len();
debug!(
"Extracting {} files totalling {}",
number_of_files.to_formatted_string(&Locale::en),
human_bytes(extracted_bytes as f64)
);

Ok(files)
}

Expand Down
73 changes: 11 additions & 62 deletions postgresql_archive/src/configuration/zonky/extractor.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
use crate::extractor::{tar_xz_extract, ExtractDirectories};
use crate::Error::Unexpected;
use crate::Result;
use human_bytes::human_bytes;
use num_format::{Locale, ToFormattedString};
use std::fs::{create_dir_all, remove_dir_all, remove_file, rename, File};
use std::io::{copy, BufReader, Cursor};
use regex::Regex;
use std::fs::{create_dir_all, remove_dir_all, remove_file, rename};
use std::io::Cursor;
use std::path::{Path, PathBuf};
use std::thread::sleep;
use std::time::Duration;
use tar::Archive;
use tracing::{debug, instrument, warn};
use xz2::bufread::XzDecoder;
use zip::ZipArchive;

/// Extracts the compressed tar `bytes` into the directories mapped by [ExtractDirectories].
Expand All @@ -19,13 +17,13 @@ use zip::ZipArchive;
#[allow(clippy::case_sensitive_file_extension_comparisons)]
#[allow(clippy::cast_precision_loss)]
#[instrument(skip(bytes))]
pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
let mut files = Vec::new();
pub fn extract(bytes: &Vec<u8>, extract_directories: ExtractDirectories) -> Result<Vec<PathBuf>> {
let out_dir = extract_directories.get_path(".")?;
let parent_dir = if let Some(parent) = out_dir.parent() {
parent
} else {
debug!("No parent directory for {}", out_dir.to_string_lossy());
out_dir
out_dir.as_path()
};

create_dir_all(parent_dir)?;
Expand All @@ -39,7 +37,7 @@ pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
out_dir.to_string_lossy()
);
remove_file(&lock_file)?;
return Ok(files);
return Ok(Vec::new());
}

let extract_dir = tempfile::tempdir_in(parent_dir)?.into_path();
Expand All @@ -64,51 +62,9 @@ pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
return Err(Unexpected("Failed to find archive file".to_string()));
}

let input = BufReader::new(Cursor::new(archive_bytes));
let decoder = XzDecoder::new(input);
let mut archive = Archive::new(decoder);
let mut extracted_bytes = 0;

for archive_entry in archive.entries()? {
let mut entry = archive_entry?;
let entry_header = entry.header();
let entry_type = entry_header.entry_type();
let entry_size = entry_header.size()?;
#[cfg(unix)]
let file_mode = entry_header.mode()?;

let entry_header_path = entry_header.path()?.to_path_buf();
let mut entry_name = extract_dir.clone();
entry_name.push(entry_header_path);

if let Some(parent) = entry_name.parent() {
if !parent.exists() {
create_dir_all(parent)?;
}
}

if entry_type.is_dir() || entry_name.is_dir() {
create_dir_all(&entry_name)?;
} else if entry_type.is_file() {
let mut output_file = File::create(&entry_name)?;
copy(&mut entry, &mut output_file)?;
extracted_bytes += entry_size;

#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
output_file.set_permissions(std::fs::Permissions::from_mode(file_mode))?;
}
files.push(entry_name);
} else if entry_type.is_symlink() {
#[cfg(unix)]
if let Some(symlink_target) = entry.link_name()? {
let symlink_path = entry_name.clone();
std::os::unix::fs::symlink(symlink_target.as_ref(), symlink_path)?;
files.push(entry_name);
}
}
}
let mut archive_extract_directories = ExtractDirectories::default();
archive_extract_directories.add_mapping(Regex::new(".*")?, extract_dir.clone());
let files = tar_xz_extract(&archive_bytes, archive_extract_directories)?;

if out_dir.exists() {
debug!(
Expand All @@ -131,13 +87,6 @@ pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<Vec<PathBuf>> {
remove_file(lock_file)?;
}

let number_of_files = files.len();
debug!(
"Extracting {} files totalling {}",
number_of_files.to_formatted_string(&Locale::en),
human_bytes(extracted_bytes as f64)
);

Ok(files)
}

Expand Down
9 changes: 9 additions & 0 deletions postgresql_archive/src/extractor/mod.rs
Original file line number Diff line number Diff line change
@@ -1 +1,10 @@
//! Archive extraction: format-specific extractor functions plus the
//! [`ExtractDirectories`] mapping model that routes archive entries
//! (matched by regex) to output directories.
mod model;
pub mod registry;
mod tar_gz_extractor;
mod tar_xz_extractor;
mod zip_extractor;

// Re-export the extraction API so callers can use
// `extractor::{ExtractDirectories, tar_gz_extract, tar_xz_extract, zip_extract}`
// without reaching into the private submodules.
pub use model::ExtractDirectories;
pub use tar_gz_extractor::extract as tar_gz_extract;
pub use tar_xz_extractor::extract as tar_xz_extract;
pub use zip_extractor::extract as zip_extract;
Loading