Commit d9412b3

Fix #42
 * Add stream_large_files option to avoid caching large files on download
 * Add large_file_threshold_mb to determine if a file is to be streamed
astrada committed Sep 30, 2014
1 parent b936e5c commit d9412b3
Showing 2 changed files with 77 additions and 6 deletions.
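
The new options land in the user's configuration file. A minimal sketch of the relevant entries, assuming the plain key=value format that the of_table/to_table functions below read and write (the values shown are the defaults from this commit):

stream_large_files=false
large_file_threshold_mb=16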
31 changes: 27 additions & 4 deletions src/config.ml
@@ -69,6 +69,11 @@ type t = {
(* Files removed from trash folder are permanently deleted (no recovery
* possible) *)
delete_forever_in_trash_folder : bool;
(* Specifies whether large files should be streamed directly instead of
* downloaded and cached *)
stream_large_files : bool;
(* Specifies the size (in megabytes) above which a file is considered large *)
large_file_threshold_mb : int;
}

let debug = {
@@ -147,10 +152,6 @@ let new_revision = {
GapiLens.get = (fun x -> x.new_revision);
GapiLens.set = (fun v x -> { x with new_revision = v })
}
let curl_debug_off = {
GapiLens.get = (fun x -> x.curl_debug_off);
GapiLens.set = (fun v x -> { x with curl_debug_off = v })
@@ -159,6 +160,14 @@ let delete_forever_in_trash_folder = {
GapiLens.get = (fun x -> x.delete_forever_in_trash_folder);
GapiLens.set = (fun v x -> { x with delete_forever_in_trash_folder = v })
}
let stream_large_files = {
GapiLens.get = (fun x -> x.stream_large_files);
GapiLens.set = (fun v x -> { x with stream_large_files = v })
}
let large_file_threshold_mb = {
GapiLens.get = (fun x -> x.large_file_threshold_mb);
GapiLens.set = (fun v x -> { x with large_file_threshold_mb = v })
}
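
The new lenses follow the same GapiLens record pattern as the rest of the file: get projects a field out of the config record, set builds an updated copy. A hypothetical usage sketch, assuming the lens operators are in scope as they are in drive.ml:

(* Read the new options through their lenses, then build an updated
 * copy with streaming enabled. *)
let streaming_on config = config |. stream_large_files
let threshold_mb config = config |. large_file_threshold_mb
let enable_streaming config = stream_large_files.GapiLens.set true config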

let umask =
let prev_umask = Unix.umask 0 in
@@ -187,6 +196,8 @@ let default = {
new_revision = true;
curl_debug_off = false;
delete_forever_in_trash_folder = false;
stream_large_files = false;
large_file_threshold_mb = 16;
}

let default_debug = {
@@ -211,6 +222,8 @@ let default_debug = {
new_revision = true;
curl_debug_off = false;
delete_forever_in_trash_folder = false;
stream_large_files = false;
large_file_threshold_mb = 16;
}

let of_table table =
@@ -254,6 +267,12 @@ let of_table table =
delete_forever_in_trash_folder =
get "delete_forever_in_trash_folder" bool_of_string
default.delete_forever_in_trash_folder;
stream_large_files =
get "stream_large_files" bool_of_string
default.stream_large_files;
large_file_threshold_mb =
get "large_file_threshold_mb" int_of_string
default.large_file_threshold_mb;
}

let to_table data =
@@ -282,6 +301,10 @@ let to_table data =
add "curl_debug_off" (data.curl_debug_off |> string_of_bool);
add "delete_forever_in_trash_folder"
(data.delete_forever_in_trash_folder |> string_of_bool);
add "stream_large_files"
(data.stream_large_files |> string_of_bool);
add "large_file_threshold_mb"
(data.large_file_threshold_mb |> string_of_int);
table
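
Because every get call in of_table above carries a compiled-in default, configuration files written before this commit still load: the two new keys simply fall back to their defaults. A hypothetical check, where old_table stands for a parsed pre-upgrade config:

let () =
  let config = Config.of_table old_table in
  assert (config.Config.stream_large_files = false);
  assert (config.Config.large_file_threshold_mb = 16)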

let debug_print out_ch start_time curl info_type info =
52 changes: 50 additions & 2 deletions src/drive.ml
@@ -33,6 +33,7 @@ let trash_directory_name_length = String.length trash_directory
let trash_directory_base_path = "/.Trash/"
let f_bsize = 4096L
let change_id_limit = 50L
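(* Bytes per megabyte, shared by the cache-size check in shrink_cache and
 * the large-file threshold in read. *)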
let mb = 1048576L

(* Utilities *)
let chars_blacklist_regexp = Str.regexp "[/\000]"
@@ -337,7 +338,6 @@ let update_cache_size new_size metadata cache =

let shrink_cache file_size max_cache_size_mb metadata cache =
let check_cache_size cache_size =
let max_cache_size = Int64.mul (Int64.of_int max_cache_size_mb) mb in
cache_size <= max_cache_size
in
@@ -992,6 +992,38 @@ let download_resource resource =
end >>
SessionM.return content_path

let stream_resource offset buffer resource =
let download_link = Option.get resource.Cache.Resource.download_url in
let length = Bigarray.Array1.dim buffer in
let finish = Int64.add offset (Int64.of_int (length - 1)) in
Utils.log_message
"Stream resource (id=%Ld,offset=%Ld,finish=%Ld,length=%d)...%!"
resource.Cache.Resource.id offset finish length;
with_try
(let media_destination = GapiMediaResource.ArrayBuffer buffer in
GapiService.download_resource
~ranges:[(Some offset, Some finish)]
download_link
media_destination)
(function
GapiRequest.PermissionDenied session ->
Utils.log_message "Server error: Permission denied.\n%!";
throw Permission_denied
| GapiRequest.RequestTimeout _ ->
Utils.log_message "Server error: Request Timeout.\n%!";
throw Resource_busy
| GapiRequest.PreconditionFailed _
| GapiRequest.Conflict _ ->
Utils.log_message "Server error: Conflict.\n%!";
throw Resource_busy
| GapiRequest.Forbidden _ ->
Utils.log_message "Server error: Forbidden.\n%!";
throw Resource_busy
| e -> throw e)
>>= fun () ->
Utils.log_message "done\n%!";
SessionM.return ()
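
Both ends of the requested range are inclusive, so a read of length bytes starting at offset asks the server for bytes offset through offset + length - 1. A quick sanity check of the arithmetic, with hypothetical numbers:

(* Hypothetical: a 4096-byte read at offset 1048576 maps to the
 * inclusive byte range 1048576-1052671. *)
let () =
  let offset = 1048576L in
  let length = 4096 in
  let finish = Int64.add offset (Int64.of_int (length - 1)) in
  assert (finish = 1052671L)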

let is_filesystem_read_only () =
Context.get_ctx () |. Context.config_lens |. Config.read_only

@@ -1348,18 +1380,34 @@ let utime path atime mtime =
(* read *)
let read path buf offset file_descr =
let (path_in_cache, trashed) = get_path_in_cache path in
let config = Context.get_ctx () |. Context.config_lens in
let large_file_threshold_mb = config |. Config.large_file_threshold_mb in
let large_file_threshold =
Int64.mul (Int64.of_int large_file_threshold_mb) mb in
let request_resource =
get_resource path_in_cache trashed >>= fun resource ->
let to_stream = config |. Config.stream_large_files &&
not (Cache.Resource.is_document resource) &&
resource.Cache.Resource.state = Cache.Resource.State.ToDownload &&
(Option.default 0L resource.Cache.Resource.file_size) >
large_file_threshold in
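(* Hypothetical walk-through with the default 16 MB threshold
 * (16 * 1048576 = 16777216 bytes): a 100 MB video not yet cached
 * (state ToDownload) is streamed straight into buf; a 1 MB file, or
 * any Google document, is downloaded and cached as before. *)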
if to_stream then
with_retry (stream_resource offset buf) resource >>= fun () ->
SessionM.return ""
else
with_retry download_resource resource
in
let content_path = do_request request_resource |> fst in
if content_path <> "" then
Utils.with_in_channel content_path
(fun ch ->
let file_descr = Unix.descr_of_in_channel ch in
Unix.LargeFile.lseek file_descr offset Unix.SEEK_SET |> ignore;
Unix_util.read file_descr buf)
else
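(* Streamed case: stream_resource has already filled buf and content_path
 * is the "" sentinel, so report the whole buffer length as read. *)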
Bigarray.Array1.dim buf
(* END read *)
(* write *)
