feat: supports resumable uploads #343

Merged (10 commits, Jan 11, 2024)
20 changes: 17 additions & 3 deletions README.md
@@ -13,7 +13,7 @@ Dufs is a distinctive utility file server that supports static serving, uploadin
- Download folder as zip file
- Upload files and folders (Drag & Drop)
- Create/Edit/Search files
- Partial responses (Parallel/Resume download)
- Resumable/partial uploads/downloads
- Access control
- Support https
- Support webdav
@@ -195,8 +195,22 @@ curl http://127.0.0.1:5000?json # output paths in json format
With authorization

```
curl http://192.168.8.10:5000/file --user user:pass # basic auth
curl http://192.168.8.10:5000/file --user user:pass --digest # digest auth
curl http://127.0.0.1:5000/file --user user:pass # basic auth
curl http://127.0.0.1:5000/file --user user:pass --digest # digest auth
```

Resumable downloads

```
curl -C- -o file http://127.0.0.1:5000/file
```

Resumable uploads

```
upload_offset=$(curl -I -s http://127.0.0.1:5000/file | tr -d '\r' | sed -n 's/content-length: //p')
dd skip=$upload_offset if=file status=none ibs=1 | \
curl -X PATCH -H "X-Update-Range: append" --data-binary @- http://127.0.0.1:5000/file
```
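
Putting the two requests together, a minimal sketch (not part of the README) that falls back to the append flow when a plain `PUT` is interrupted; it reuses the example URL above, and note from the server change below that a partial file from an interrupted `PUT` is only kept when the upload is at least `RESUMABLE_UPLOAD_MIN_SIZE` (20 MiB):

```
url=http://127.0.0.1:5000/file
if ! curl -f -T file "$url"; then
  # ask the server how many bytes it already has, then append the rest
  offset=$(curl -s -I "$url" | tr -d '\r' | sed -n 's/content-length: //p')
  dd skip="$offset" if=file status=none ibs=1 | \
    curl -f -X PATCH -H "X-Update-Range: append" --data-binary @- "$url"
fi
```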

<details>
5 changes: 4 additions & 1 deletion assets/index.css
@@ -134,7 +134,6 @@ body {
}

.cell-status span {
width: 80px;
display: inline-block;
}

@@ -239,6 +238,10 @@ body {
font-style: italic;
}

.retry-btn {
cursor: pointer;
}

@media (min-width: 768px) {
.path a {
min-width: 400px;
75 changes: 60 additions & 15 deletions assets/index.js
@@ -59,6 +59,11 @@ const ICONS = {
view: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M4 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2zm0 1h8a1 1 0 0 1 1 1v12a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1"/></svg>`,
}

/**
* @type Map<number, Uploader>
*/
const failUploaders = new Map();

/**
* @type Element
*/
@@ -128,23 +133,24 @@ class Uploader {
/**
*
* @param {File} file
* @param {string[]} dirs
* @param {string[]} pathParts
*/
constructor(file, dirs) {
constructor(file, pathParts) {
/**
* @type Element
*/
this.$uploadStatus = null
this.uploaded = 0;
this.uploadOffset = 0;
this.lastUptime = 0;
this.name = [...dirs, file.name].join("/");
this.name = [...pathParts, file.name].join("/");
this.idx = Uploader.globalIdx++;
this.file = file;
this.url = newUrl(this.name);
}

upload() {
const { idx, name } = this;
const url = newUrl(name);
const { idx, name, url } = this;
const encodedName = encodedStr(name);
$uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader">
@@ -160,51 +166,90 @@
$emptyFolder.classList.add("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
this.$uploadStatus.innerHTML = '-';
this.$uploadStatus.addEventListener("click", e => {
const nodeId = e.target.id;
const matches = /^retry(\d+)$/.exec(nodeId);
if (matches) {
const id = parseInt(matches[1]);
let uploader = failUploaders.get(id);
if (uploader) uploader.retry();
}
});
Uploader.queues.push(this);
Uploader.runQueue();
}

ajax() {
const url = newUrl(this.name);
const { url } = this;

this.uploaded = 0;
this.lastUptime = Date.now();

const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => {
if (ajax.readyState === 4) {
if (ajax.status >= 200 && ajax.status < 300) {
this.complete();
} else {
this.fail();
if (ajax.status != 0) {
this.fail(`${ajax.status} ${ajax.statusText}`);
}
}
}
})
ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url);
ajax.send(this.file);
if (this.uploadOffset > 0) {
ajax.open("PATCH", url);
ajax.setRequestHeader("X-Update-Range", "append");
ajax.send(this.file.slice(this.uploadOffset));
} else {
ajax.open("PUT", url);
ajax.send(this.file);
// setTimeout(() => ajax.abort(), 3000);
}
}

async retry() {
const { url } = this;
let res = await fetch(url, {
method: "HEAD",
});
let uploadOffset = 0;
if (res.status == 200) {
let value = res.headers.get("content-length");
uploadOffset = parseInt(value) || 0;
}
this.uploadOffset = uploadOffset;
this.ajax()
}

progress(event) {
const now = Date.now();
const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
const [speedValue, speedUnit] = formatSize(speed);
const speedText = `${speedValue} ${speedUnit}/s`;
const progress = formatPercent((event.loaded / event.total) * 100);
const progress = formatPercent(((event.loaded + this.uploadOffset) / this.file.size) * 100);
const duration = formatDuration((event.total - event.loaded) / speed)
this.$uploadStatus.innerHTML = `<span>${speedText}</span><span>${progress} ${duration}</span>`;
this.$uploadStatus.innerHTML = `<span style="width: 80px;">${speedText}</span><span>${progress} ${duration}</span>`;
this.uploaded = event.loaded;
this.lastUptime = now;
}

complete() {
this.$uploadStatus.innerHTML = `✓`;
const $uploadStatusNew = this.$uploadStatus.cloneNode(true);
$uploadStatusNew.innerHTML = `✓`;
this.$uploadStatus.parentNode.replaceChild($uploadStatusNew, this.$uploadStatus);
this.$uploadStatus = null;
failUploaders.delete(this.idx);
Uploader.runnings--;
Uploader.runQueue();
}

fail() {
this.$uploadStatus.innerHTML = `✗`;
fail(reason = "") {
this.$uploadStatus.innerHTML = `<span style="width: 20px;" title="${reason}">✗</span><span class="retry-btn" id="retry${this.idx}" title="Retry">↻</span>`;
failUploaders.set(this.idx, this);
Uploader.runnings--;
Uploader.runQueue();
}
@@ -801,7 +846,7 @@ function padZero(value, size) {
}

function formatSize(size) {
if (size == null) return []
if (size == null) return [0, "B"]
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return [0, "B"];
const i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));
107 changes: 88 additions & 19 deletions src/server.rs
@@ -42,6 +42,7 @@ use std::time::SystemTime;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite};
use tokio::{fs, io};

use tokio_util::compat::FuturesAsyncWriteCompatExt;
use tokio_util::io::{ReaderStream, StreamReader};
use uuid::Uuid;
@@ -57,7 +58,8 @@ const INDEX_JS: &str = include_str!("../assets/index.js");
const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico");
const INDEX_NAME: &str = "index.html";
const BUF_SIZE: usize = 65536;
const TEXT_MAX_SIZE: u64 = 4194304; // 4M
const EDITABLE_TEXT_MAX_SIZE: u64 = 4194304; // 4M
const RESUMABLE_UPLOAD_MIN_SIZE: u64 = 20971520; // 20M

pub struct Server {
args: Args,
@@ -327,10 +329,37 @@ impl Server {
set_webdav_headers(&mut res);
}
Method::PUT => {
if !allow_upload || (!allow_delete && is_file && size > 0) {
if is_dir || !allow_upload || (!allow_delete && size > 0) {
status_forbid(&mut res);
} else {
self.handle_upload(path, req, &mut res).await?;
self.handle_upload(path, None, size, req, &mut res).await?;
}
}
Method::PATCH => {
if is_miss {
status_not_found(&mut res);
} else if !allow_upload {
status_forbid(&mut res);
} else {
let offset = match parse_upload_offset(headers, size) {
Ok(v) => v,
Err(err) => {
status_bad_request(&mut res, &err.to_string());
return Ok(res);
}
};
match offset {
Some(offset) => {
if offset < size && !allow_delete {
status_forbid(&mut res);
} else {
self.handle_upload(path, Some(offset), size, req, &mut res)
.await?;
}
}
None => {
*res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
}
}
}
}
Method::DELETE => {
@@ -417,17 +446,27 @@
Ok(res)
}

async fn handle_upload(&self, path: &Path, req: Request, res: &mut Response) -> Result<()> {
async fn handle_upload(
&self,
path: &Path,
upload_offset: Option<u64>,
size: u64,
req: Request,
res: &mut Response,
) -> Result<()> {
ensure_path_parent(path).await?;

let mut file = match fs::File::create(&path).await {
Ok(v) => v,
Err(_) => {
status_forbid(res);
return Ok(());
let (mut file, status) = match upload_offset {
None => (fs::File::create(path).await?, StatusCode::CREATED),
Some(offset) if offset == size => (
fs::OpenOptions::new().append(true).open(path).await?,
StatusCode::NO_CONTENT,
),
Some(offset) => {
let mut file = fs::OpenOptions::new().write(true).open(path).await?;
file.seek(SeekFrom::Start(offset)).await?;
(file, StatusCode::NO_CONTENT)
}
};

let stream = IncomingStream::new(req.into_body());

let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
Expand All @@ -436,13 +475,19 @@ impl Server {
pin_mut!(body_reader);

let ret = io::copy(&mut body_reader, &mut file).await;
let size = fs::metadata(path)
.await
.map(|v| v.len())
.unwrap_or_default();
if ret.is_err() {
tokio::fs::remove_file(&path).await?;

if upload_offset.is_none() && size < RESUMABLE_UPLOAD_MIN_SIZE {
let _ = tokio::fs::remove_file(&path).await;
}
ret?;
}

*res.status_mut() = StatusCode::CREATED;
*res.status_mut() = status;

Ok(())
}

@@ -830,7 +875,8 @@
);
let mut buffer: Vec<u8> = vec![];
file.take(1024).read_to_end(&mut buffer).await?;
let editable = meta.len() <= TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text();
let editable =
meta.len() <= EDITABLE_TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text();
let data = EditData {
href,
kind,
@@ -867,7 +913,7 @@
Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) {
Some(v) => v,
None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
status_bad_request(res, "");
return Ok(());
}
},
@@ -1545,6 +1591,13 @@ fn status_no_content(res: &mut Response) {
*res.status_mut() = StatusCode::NO_CONTENT;
}

fn status_bad_request(res: &mut Response, body: &str) {
*res.status_mut() = StatusCode::BAD_REQUEST;
if !body.is_empty() {
*res.body_mut() = body_full(body.to_string());
}
}

fn set_content_diposition(res: &mut Response, inline: bool, filename: &str) -> Result<()> {
let kind = if inline { "inline" } else { "attachment" };
let filename: String = filename
@@ -1584,10 +1637,12 @@ fn is_hidden(hidden: &[String], file_name: &str, is_dir_type: bool) -> bool {
fn set_webdav_headers(res: &mut Response) {
res.headers_mut().insert(
"Allow",
HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"),
HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE"),
);
res.headers_mut().insert(
"DAV",
HeaderValue::from_static("1, 2, 3, sabredav-partialupdate"),
);
res.headers_mut()
.insert("DAV", HeaderValue::from_static("1,2"));
}

async fn get_content_type(path: &Path) -> Result<String> {
Expand Down Expand Up @@ -1620,3 +1675,17 @@ async fn get_content_type(path: &Path) -> Result<String> {
};
Ok(content_type)
}

fn parse_upload_offset(headers: &HeaderMap<HeaderValue>, size: u64) -> Result<Option<u64>> {
let value = match headers.get("x-update-range") {
Some(v) => v,
None => return Ok(None),
};
let err = || anyhow!("Invalid X-Update-Range header");
let value = value.to_str().map_err(|_| err())?;
if value == "append" {
return Ok(Some(size));
}
let (start, _) = parse_range(value, size).ok_or_else(err)?;
Ok(Some(start))
}
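
For the non-`append` branch of `parse_upload_offset`, a hedged example: the `bytes=<start>-` value and the `patch.bin` payload name are assumptions, and `parse_range` is presumed to accept the same syntax as the HTTP `Range` header, which this diff does not show. Per the handler above, overwriting inside an existing file (offset < size) also requires delete permission.

```
# Assumption: X-Update-Range accepts a Range-style value parsed by parse_range.
# Overwrite the stored file starting at byte 1024 (server started with --allow-delete):
curl -X PATCH -H "X-Update-Range: bytes=1024-" --data-binary @patch.bin http://127.0.0.1:5000/file
```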