Skip to content

Commit

Permalink
zip downloading (#36)
Browse files Browse the repository at this point in the history
allows installing from git
  • Loading branch information
bend-n committed May 26, 2023
1 parent b721fe1 commit dcc6b4e
Show file tree
Hide file tree
Showing 10 changed files with 512 additions and 178 deletions.
5 changes: 3 additions & 2 deletions Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "godot-package-manager"
version = "1.3.2"
version = "1.4.0"
edition = "2021"
authors = ["bendn <bend.n@outlook.com>"]
description = "A package manager for godot"
Expand All @@ -12,13 +12,14 @@ license = "Apache-2.0"
[dependencies]
clap = { version = "4.0.29", features = ["derive"] }
deser-hjson = "1.0.2"
flate2 = "1.0.25"
lazy_static = "1.4.0"
regex = "1.7.0"
serde = { version = "1.0.150", features = ["derive"] }
serde_json = "1.0.89"
serde_yaml = "0.9.14"
tar = "0.4.38"
flate2 = "1.0.25"
zip = { version = "0.6", features = ["bzip2"] }
toml = "0.5.10"
sha1 = "0.10.5"
console = "0.15.4"
Expand Down
4 changes: 2 additions & 2 deletions godot.lock
Expand Up @@ -6,8 +6,8 @@
},
{
"name": "@bendn/splitter",
"tarball": "https://registry.npmjs.org/@bendn/splitter/-/splitter-1.0.6.tgz",
"version": "1.0.6"
"tarball": "https://github.com/bend-n/splitter/archive/refs/heads/main.zip",
"version": "1.1.0"
},
{
"name": "@bendn/stockfish.gd",
Expand Down
4 changes: 2 additions & 2 deletions godot.package
@@ -1,5 +1,5 @@
packages: {
@bendn/test: "^2.0.0"
@bendn/splitter: "1.0.x"
@bendn/stockfish.gd: "1.*"
}
"https://github.com/bend-n/splitter/archive/refs/heads/main.zip": "1.0.x"
}
261 changes: 261 additions & 0 deletions src/archive.rs
@@ -0,0 +1,261 @@
use crate::config_file::*;
use crate::ctx;
use crate::package::Package;
use crate::Client;
use anyhow::{Context, Result};
use flate2::bufread::GzDecoder;
use serde::Serialize;
use std::fmt::Display;
use std::fs::{create_dir_all, set_permissions, File, Permissions};
use std::io::{self, prelude::*, Cursor};
use std::path::{Component::Normal, Path, PathBuf};
use tar::Archive as Tarchive;
use tar::EntryType::Directory;
use zip::result::{ZipError, ZipResult};
use zip::ZipArchive as Zarchive;

/// Tar reader over gzip-decompressed, in-memory archive bytes.
type TArch = Tarchive<GzDecoder<Cursor<Vec<u8>>>>;
/// Zip reader over in-memory archive bytes.
type ZArch = Zarchive<Cursor<Vec<u8>>>;

/// A downloaded archive payload: the raw bytes plus the uri they came from.
#[derive(Default, Clone, Serialize, PartialEq, Eq, Ord, PartialOrd, Hash, Debug)]
pub struct Data {
    // raw archive bytes; skipped on serialization — only the uri is persisted
    #[serde(skip)]
    pub bytes: Vec<u8>,
    // source of the bytes (e.g. a registry tarball or github zip url)
    pub uri: String,
}

impl Data {
    /// Pairs raw archive bytes with the uri they were fetched from.
    pub fn new(bytes: Vec<u8>, uri: String) -> Self {
        Self { bytes, uri }
    }

    /// Wraps bytes with no known source; the uri is left empty.
    pub fn new_bytes(bytes: Vec<u8>) -> Self {
        Self::new(bytes, String::new())
    }

    /// Records a source uri with no payload; the byte buffer is left empty.
    pub fn new_uri(uri: String) -> Self {
        Self::new(Vec::new(), uri)
    }
}

/// How a package archive is compressed, carrying its payload.
/// Serialized `untagged`, so each variant is written as its bare contents.
#[derive(Default, Clone, Serialize, PartialEq, Eq, Ord, PartialOrd, Hash, Debug)]
#[serde(untagged)]
pub enum CompressionType {
    // gzipped tarball (registry .tgz downloads)
    Gzip(Data),
    // zip archive (e.g. github `archive/refs/...` downloads)
    Zip(Data),
    // locked form: just the source uri, as persisted in the lockfile
    Lock(String),
    #[default]
    None,
}

impl Display for CompressionType {
    /// Displays the archive's source uri (`Lock` shows its stored uri).
    ///
    /// Panics (via `unreachable!`) on `None`, which callers never format.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let uri = match self {
            CompressionType::Gzip(d) | CompressionType::Zip(d) => d.uri.as_str(),
            CompressionType::Lock(s) => s.as_str(),
            _ => unreachable!(),
        };
        f.write_str(uri)
    }
}

impl CompressionType {
    /// Chooses a compression type from a file-extension-like tag.
    /// Only `"zip"` selects [`CompressionType::Zip`]; everything else is
    /// treated as a gzipped tarball.
    pub fn from(ty: &str, bytes: Vec<u8>, uri: String) -> Self {
        let data = Data::new(bytes, uri);
        if ty == "zip" {
            Self::Zip(data)
        } else {
            Self::Gzip(data)
        }
    }

    /// Collapses this entry into its lockfile form, keeping only the uri
    /// (the byte payload is discarded).
    ///
    /// Panics (via `unreachable!`) when called on `Lock` or `None`.
    pub fn lock(&mut self) {
        let uri = match self {
            CompressionType::Gzip(d) | CompressionType::Zip(d) => std::mem::take(&mut d.uri),
            _ => unreachable!(),
        };
        *self = Self::Lock(uri);
    }
}

/// The concrete reader backing an [`Archive`]: a gzipped tar or a zip,
/// both over in-memory bytes. The tar reader is boxed — presumably to keep
/// the variant sizes balanced; confirm before unboxing.
enum ArchiveType {
    Gzip(Box<TArch>),
    Zip(ZArch),
}

/// An in-memory package archive together with the uri it was fetched from.
pub struct Archive {
    inner: ArchiveType,
    uri: String,
}

// impl<'a, Z> From<TArch<'a>> for Archive<'a> {
// fn from(value: TArch<'a>) -> Self {
// Self::Gzip(value)
// }
// }

// impl<'a> From<ZArch<'a>> for Archive<'a> {
// fn from(value: ZArch<'a>) -> Self {
// Self::Zip(value)
// }
// }

/// Extracts every entry of `archive` into `dst`, stripping the archive's
/// top-level directory via [`skip_toplevel`] (zip downloads such as github's
/// `archive/refs/...` wrap everything in a single folder).
///
/// Creates `dst` if it does not exist. Entries whose names would escape the
/// archive root (`enclosed_name()` is `None`) abort the whole unpack.
fn unpack_zarchive(archive: &mut ZArch, dst: &Path) -> ZipResult<()> {
    if dst.symlink_metadata().is_err() {
        create_dir_all(dst).map_err(ZipError::Io)?;
    }
    // canonicalize when possible so joined entry paths are absolute and clean
    let dst = &dst.canonicalize().unwrap_or(dst.to_path_buf());

    // directory entries are deferred until after all files are written
    let mut directories = vec![];
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let path = dst.join(skip_toplevel(
            file.enclosed_name().ok_or(ZipError::FileNotFound)?,
        ));
        if file.is_dir() {
            directories.push(path);
        } else {
            create_dir_all(path.parent().unwrap())?;
            let mut outfile = File::create(&path)?;
            io::copy(&mut file, &mut outfile)?;
            // restore the entry's recorded unix permission bits, if any
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                if let Some(mode) = file.unix_mode() {
                    set_permissions(&path, Permissions::from_mode(mode))?;
                }
            }
        }
    }
    // most of these already exist (created for their files above); this
    // catches directories that contained no files
    for path in directories {
        create_dir_all(path)?;
    }
    Ok(())
}

/// Drops the first component of `p` (the archive's top-level directory) and
/// discards every non-normal component (`..`, `.`, roots), yielding a safe
/// relative path to join under the extraction root.
fn skip_toplevel(p: &Path) -> PathBuf {
    let mut stripped = PathBuf::new();
    for component in p.components().skip(1) {
        if let Normal(part) = component {
            stripped.push(part);
        }
    }
    stripped
}

/// Extracts every entry of `archive` into `dst`, stripping the tarball's
/// top-level directory via [`skip_toplevel`].
///
/// Creates `dst` if it does not exist.
fn unpack_tarchive(archive: &mut TArch, dst: &Path) -> io::Result<()> {
    if dst.symlink_metadata().is_err() {
        create_dir_all(dst)?;
    }

    // canonicalize when possible so joined entry paths are absolute and clean
    let dst = &dst.canonicalize().unwrap_or(dst.to_path_buf());

    // Delay any directory entries until the end (they will be created if needed by
    // descendants), to ensure that directory permissions do not interfere with descendant
    // extraction.
    let mut directories = Vec::new();
    for entry in archive.entries()? {
        let entry = entry?;
        // pair each entry with its (top-level-stripped) destination path
        let mut entry = (dst.join(skip_toplevel(&entry.path()?)), entry);
        if entry.1.header().entry_type() == Directory {
            directories.push(entry);
        } else {
            create_dir_all(entry.0.parent().unwrap())?;
            entry.1.unpack(entry.0)?;
        }
    }
    for mut dir in directories {
        dir.1.unpack(dir.0)?;
    }
    Ok(())
}

/// Searches `zarchive` for the first entry whose base file name equals
/// `search` and reads its contents into `out`.
///
/// Returns `ZipError::FileNotFound` when no entry matches; propagates any
/// read error from a matching entry.
fn get_zfile(zarchive: &mut ZArch, search: &str, out: &mut String) -> ZipResult<()> {
    for i in 0..zarchive.len() {
        let mut file = zarchive.by_index(i)?;
        // `enclosed_name()` already yields a sanitized `&Path`, so the old
        // `Path::new(n)` re-wrap was redundant. Resolve the name comparison
        // first so the immutable borrow ends before `read_to_string` below.
        let matches = file
            .enclosed_name()
            .and_then(|p| p.file_name())
            .map_or(false, |base| base.to_string_lossy() == search);
        if matches {
            file.read_to_string(out)?;
            return Ok(());
        }
    }
    Err(ZipError::FileNotFound)
}

/// Scans `tarchive` for the first entry whose base file name equals `file`
/// and reads its contents into `out`.
///
/// Entries with unreadable paths are skipped; an entry path with no file
/// name component errors immediately. When no entry matches, returns
/// `io::ErrorKind::InvalidData`.
fn get_gfile(tarchive: &mut TArch, file: &str, out: &mut String) -> io::Result<()> {
    for entry in tarchive.entries()? {
        let mut entry = entry?;
        // decide on a match first, so the path borrow ends before reading
        let found = match entry.path() {
            Ok(p) => p.file_name().ok_or(io::ErrorKind::InvalidData)? == file,
            Err(_) => false,
        };
        if found {
            entry.read_to_string(out)?;
            return Ok(());
        }
    }
    Err(io::ErrorKind::InvalidData.into())
}

impl Archive {
    /// Extracts the whole archive into `dst`, dispatching to the matching
    /// tar/zip unpacker (the top-level directory is stripped either way).
    pub fn unpack(&mut self, dst: &Path) -> Result<()> {
        match &mut self.inner {
            ArchiveType::Gzip(g) => unpack_tarchive(g, dst)?,
            ArchiveType::Zip(z) => unpack_zarchive(z, dst)?,
        }
        Ok(())
    }

    /// Reads the contents of the first entry named `file` into `out`.
    pub fn get_file(&mut self, file: &str, out: &mut String) -> Result<()> {
        match &mut self.inner {
            ArchiveType::Gzip(g) => get_gfile(g, file, out)?,
            ArchiveType::Zip(z) => get_zfile(z, file, out)?,
        }
        Ok(())
    }

    /// Internal constructor pairing a reader with its source uri.
    fn wrap(wrap: ArchiveType, uri: String) -> Self {
        Self { inner: wrap, uri }
    }

    /// Opens an archive from downloaded data. Only `Gzip`/`Zip` payloads are
    /// valid here; `Lock`/`None` indicate a caller bug (`unreachable!`).
    pub fn new(value: CompressionType) -> Result<Self> {
        match value {
            CompressionType::Gzip(data) => Ok(Self::new_gzip(data.bytes, data.uri)),
            CompressionType::Zip(data) => Self::new_zip(data.bytes, data.uri),
            _ => unreachable!(),
        }
    }

    /// Wraps gzipped-tarball bytes; infallible up front (no headers are
    /// validated at construction time).
    pub fn new_gzip(value: Vec<u8>, uri: String) -> Self {
        Self::wrap(
            ArchiveType::Gzip(Box::new(Tarchive::new(GzDecoder::new(Cursor::new(value))))),
            uri,
        )
    }

    /// Wraps zip bytes; fails when the bytes cannot be opened as a zip.
    pub fn new_zip(value: Vec<u8>, uri: String) -> Result<Self> {
        Ok(Self::wrap(
            ArchiveType::Zip(Zarchive::new(Cursor::new(value))?),
            uri,
        ))
    }

    /// Extracts `package.json` from the archive and parses it into a
    /// [`Package`] whose compression source records this archive's uri.
    ///
    /// A plain async method rather than a trait impl:
    /// async trait + lifetimes = boom
    pub async fn into_package(mut self, client: Client) -> Result<Package> {
        let mut contents = String::new();
        {
            ctx!(
                self.get_file("package.json", &mut contents),
                "searching for package.json"
            )?;
        }
        // rebuild the compression type (uri only) so the package remembers
        // where to re-download itself from
        let ty = match self.inner {
            ArchiveType::Zip(_) => CompressionType::Zip(Data::new_uri(self.uri)),
            ArchiveType::Gzip(_) => CompressionType::Gzip(Data::new_uri(self.uri)),
        };
        ctx!(
            ctx!(
                ConfigFile::parse(&contents, ConfigType::JSON, client).await,
                "parsing config file from package.json inside zipfile"
            )?
            .into_package(ty),
            "turning config file into package"
        )
    }
}
41 changes: 29 additions & 12 deletions src/cache.rs
@@ -1,3 +1,5 @@
use crate::archive::*;
use crate::conversions::TryIntoAsync;
use crate::package::parsing::{Packument, ParsedManifest, ParsedPackage};
use crate::package::Package;
use crate::{ctx, Client};
Expand Down Expand Up @@ -127,15 +129,22 @@ impl std::fmt::Debug for VersionsCache {
}
}

#[derive(Debug, Clone, Default)] // yuck, a clone
/// One cached stage of a package's resolution pipeline.
#[derive(Default, Clone)] // yuck, a clone
pub enum CacheEntry {
    // fetched package metadata, not yet resolved into a `Package`
    Unparsed(ParsedPackage),
    // fully resolved package
    Parsed(Package),
    // parsed manifest awaiting conversion into a `Package`
    Manifest(ParsedManifest),
    // downloaded archive payload (tar.gz or zip), not yet opened
    Tarball(CompressionType),
    // nothing cached yet
    #[default]
    Empty,
}

/// Caches downloaded archive data as a `Tarball` entry.
impl From<CompressionType> for CacheEntry {
    fn from(value: CompressionType) -> Self {
        Self::Tarball(value)
    }
}

impl From<Package> for CacheEntry {
fn from(value: Package) -> Self {
Self::Parsed(value)
Expand All @@ -153,22 +162,23 @@ impl From<ParsedPackage> for CacheEntry {
}

impl CacheEntry {
pub async fn parse(&mut self, client: Client, cache: Cache, name: String) -> Result<()> {
match self {
CacheEntry::Unparsed(p) => {
let p = std::mem::take(p).into_package(client, cache).await?;
*self = CacheEntry::Parsed(p);
}
pub async fn parse(&mut self, client: Client, name: String) -> Result<()> {
*self = CacheEntry::from(match self {
CacheEntry::Unparsed(p) => std::mem::take(p).into_package(client).await?,
CacheEntry::Manifest(m) => {
let m = ctx!(
std::mem::take(m).into_manifest(client, cache).await,
std::mem::take(m).try_into_async(client).await,
"parsing ParsedManifest into Manifest in get_package()"
)?;
let p = Package::from_manifest(m, name.clone());
*self = CacheEntry::Parsed(p);
Package::from_manifest(m, name.clone())
}
_ => {}
}
CacheEntry::Tarball(t) => {
Archive::new(std::mem::take(t))?
.into_package(client)
.await?
}
_ => return Ok(()),
});
Ok(())
}

Expand All @@ -178,4 +188,11 @@ impl CacheEntry {
_ => unreachable!(),
}
}

// pub fn get_bytes(&self) -> &Vec<u8> {
// match self {
// CacheEntry::Tarball(t) => t,
// _ => unreachable!(),
// }
// }
}

0 comments on commit dcc6b4e

Please sign in to comment.