Merge pull request #232 from hkalexling/rc/0.24.0
v0.24.0
hkalexling committed Sep 25, 2021
2 parents e408398 + 9df372f commit 8a73280
Showing 18 changed files with 659 additions and 119 deletions.
7 changes: 7 additions & 0 deletions .dockerignore
@@ -1,2 +1,9 @@
node_modules
lib
Dockerfile
Dockerfile.arm32v7
Dockerfile.arm64v8
README.md
.all-contributorsrc
env.example
.github/
8 changes: 6 additions & 2 deletions README.md
@@ -51,7 +51,7 @@ The official docker images are available on [Dockerhub](https://hub.docker.com/r
### CLI

```
Mango - Manga Server and Web Reader. Version 0.23.0
Mango - Manga Server and Web Reader. Version 0.24.0
Usage:
@@ -86,6 +86,10 @@ log_level: info
upload_path: ~/mango/uploads
plugin_path: ~/mango/plugins
download_timeout_seconds: 30
library_cache_path: ~/mango/library.yml.gz
cache_enabled: false
cache_size_mbs: 50
cache_log_enabled: true
disable_login: false
default_username: ""
auth_proxy_header_name: ""
@@ -97,12 +101,12 @@ mangadex:
download_queue_db_path: ~/mango/queue.db
chapter_rename_rule: '[Vol.{volume} ][Ch.{chapter} ]{title|id}'
manga_rename_rule: '{title}'
subscription_update_interval_hours: 24
```

- `scan_interval_minutes`, `thumbnail_generation_interval_hours`, and `db_optimization_interval_hours` can be any non-negative integer. Setting one of them to `0` disables the corresponding periodic task
- `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables logging
- You can disable authentication by setting `disable_login` to `true`. Note that `default_username` must be set to an existing username for this to work.
- Setting `cache_enabled` to `true` enables an experimental feature where Mango caches library metadata to improve page load time. You can fine-tune it with `cache_size_mbs` and `cache_log_enabled`, as shown in the example below.
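
For example, to turn the cache on with a 100 MB limit and without the extra cache log messages, the relevant settings might look like this (the values are illustrative, not the defaults):

```
cache_enabled: true
cache_size_mbs: 100
cache_log_enabled: false
```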

### Library Structure

2 changes: 1 addition & 1 deletion shard.yml
@@ -1,5 +1,5 @@
name: mango
version: 0.23.0
version: 0.24.0

authors:
- Alex Ling <hkalexling@gmail.com>
10 changes: 10 additions & 0 deletions spec/util_spec.cr
@@ -61,3 +61,13 @@ describe "chapter_sort" do
end.should eq ary
end
end

describe "sanitize_filename" do
it "returns a random string for empty sanitized string" do
sanitize_filename("..").should_not eq sanitize_filename("..")
end
it "sanitizes correctly" do
sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
.should eq "マンゴー_()[1_2] 3.14 hello world"
end
end
5 changes: 5 additions & 0 deletions src/config.cr
@@ -11,6 +11,8 @@ class Config
property session_secret : String = "mango-session-secret"
property library_path : String = File.expand_path "~/mango/library",
home: true
property library_cache_path = File.expand_path "~/mango/library.yml.gz",
home: true
property db_path : String = File.expand_path "~/mango/mango.db", home: true
property scan_interval_minutes : Int32 = 5
property thumbnail_generation_interval_hours : Int32 = 24
@@ -20,6 +22,9 @@ class Config
property plugin_path : String = File.expand_path "~/mango/plugins",
home: true
property download_timeout_seconds : Int32 = 30
property cache_enabled = false
property cache_size_mbs = 50
property cache_log_enabled = true
property disable_login = false
property default_username = ""
property auth_proxy_header_name = ""
188 changes: 188 additions & 0 deletions src/library/cache.cr
@@ -0,0 +1,188 @@
require "digest"

require "./entry"
require "./types"

# Base class for an entry in the LRU cache.
# There are two ways to use it:
# 1. Use it as it is by instantiating with the appropriate `SaveT` and
# `ReturnT`. Note that in this case, `SaveT` and `ReturnT` must be the
# same type. That is, the input value will be stored as it is without
# any transformation.
# 2. You can also subclass it and provide custom implementations for
# `to_save_t` and `to_return_t`. This allows you to transform and store
# the input value to a different type. See `SortedEntriesCacheEntry` as
# an example.
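#
# Illustrative sketch (not part of the original file) of the first approach,
# where `SaveT` and `ReturnT` are the same type:
#
#   entry = CacheEntry(String, String).new "greeting", "hello"
#   entry.value # => "hello"; reading the value also refreshes the access time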
private class CacheEntry(SaveT, ReturnT)
getter key : String, atime : Time

@value : SaveT

def initialize(@key : String, value : ReturnT)
@atime = @ctime = Time.utc
@value = self.class.to_save_t value
end

def value
@atime = Time.utc
self.class.to_return_t @value
end

def self.to_save_t(value : ReturnT)
value
end

def self.to_return_t(value : SaveT)
value
end

def instance_size
instance_sizeof(CacheEntry(SaveT, ReturnT)) + # sizeof itself
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
@value.instance_size
end
end

class SortedEntriesCacheEntry < CacheEntry(Array(String), Array(Entry))
def self.to_save_t(value : Array(Entry))
value.map &.id
end

def self.to_return_t(value : Array(String))
ids_to_entries value
end

private def self.ids_to_entries(ids : Array(String))
e_map = Library.default.deep_entries.to_h { |entry| {entry.id, entry} }
entries = [] of Entry
begin
ids.each do |id|
entries << e_map[id]
end
return entries if ids.size == entries.size
rescue
end
end

def instance_size
instance_sizeof(SortedEntriesCacheEntry) + # sizeof itself
instance_sizeof(String) + @key.bytesize + # allocated memory for @key
@value.size * (instance_sizeof(String) + sizeof(String)) +
@value.sum(&.bytesize) # elements in Array(String)
end

def self.gen_key(book_id : String, username : String,
entries : Array(Entry), opt : SortOptions?)
entries_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
user_context = opt && opt.method == SortMethod::Progress ? username : ""
sig = Digest::SHA1.hexdigest (book_id + entries_sig + user_context +
(opt ? opt.to_tuple.to_s : "nil"))
"#{sig}:sorted_entries"
end
end

class String
def instance_size
instance_sizeof(String) + bytesize
end
end

struct Tuple(*T)
def instance_size
sizeof(T) + # total size of non-reference types
self.sum do |e|
next 0 unless e.is_a? Reference
if e.responds_to? :instance_size
e.instance_size
else
instance_sizeof(typeof(e))
end
end
end
end

alias CacheableType = Array(Entry) | String | Tuple(String, Int32)
alias CacheEntryType = SortedEntriesCacheEntry |
CacheEntry(String, String) |
CacheEntry(Tuple(String, Int32), Tuple(String, Int32))

def generate_cache_entry(key : String, value : CacheableType)
if value.is_a? Array(Entry)
SortedEntriesCacheEntry.new key, value
else
CacheEntry(typeof(value), typeof(value)).new key, value
end
end

# LRU Cache
class LRUCache
@@limit : Int128 = Int128.new 0
@@should_log = true
# key => entry
@@cache = {} of String => CacheEntryType

def self.enabled
Config.current.cache_enabled
end

def self.init
cache_size = Config.current.cache_size_mbs
@@limit = Int128.new cache_size * 1024 * 1024 if enabled
@@should_log = Config.current.cache_log_enabled
end

def self.get(key : String)
return unless enabled
entry = @@cache[key]?
if @@should_log
Logger.debug "LRUCache #{entry.nil? ? "miss" : "hit"} #{key}"
end
return entry.value unless entry.nil?
end

def self.set(cache_entry : CacheEntryType)
return unless enabled
key = cache_entry.key
@@cache[key] = cache_entry
Logger.debug "LRUCache cached #{key}" if @@should_log
remove_least_recent_access
end

def self.invalidate(key : String)
return unless enabled
@@cache.delete key
end

def self.print
return unless @@should_log
sum = @@cache.sum { |_, entry| entry.instance_size }
Logger.debug "---- LRU Cache ----"
Logger.debug "Size: #{sum} Bytes"
Logger.debug "List:"
@@cache.each do |k, v|
Logger.debug "#{k} | #{v.atime} | #{v.instance_size}"
end
Logger.debug "-------------------"
end

private def self.is_cache_full
sum = @@cache.sum { |_, entry| entry.instance_size }
sum > @@limit
end

private def self.remove_least_recent_access
if @@should_log && is_cache_full
Logger.debug "Removing entries from LRUCache"
end
while is_cache_full && @@cache.size > 0
min_tuple = @@cache.min_by { |_, entry| entry.atime }
min_key = min_tuple[0]
min_entry = min_tuple[1]

Logger.debug " \
Target: #{min_key}, \
Last Access Time: #{min_entry.atime}" if @@should_log
invalidate min_key
end
end
end
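
A rough usage sketch of the cache added above (assumptions: `book`, `username`, `entries`, and `opt` already exist in the caller, and `sort_entries_by` stands in for whatever sorting routine is used — it is not a Mango function):

```
# Minimal sketch, not taken from the Mango sources.
LRUCache.init # reads cache_enabled, cache_size_mbs, and cache_log_enabled

key = SortedEntriesCacheEntry.gen_key book.id, username, entries, opt

# `get` returns nil on a cache miss or when the cache is disabled.
cached = LRUCache.get key
if cached.is_a? Array(Entry)
  sorted = cached
else
  sorted = sort_entries_by entries, opt, username # hypothetical helper
  LRUCache.set generate_cache_entry(key, sorted)
end
```

The `save_progress` change in `src/library/entry.cr` below shows the matching invalidation: the progress-sorted keys are rebuilt with the same `gen_key` call and dropped via `LRUCache.invalidate`.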
45 changes: 29 additions & 16 deletions src/library/entry.cr
@@ -1,6 +1,9 @@
require "image_size"
require "yaml"

class Entry
include YAML::Serializable

getter zip_path : String, book : Title, title : String,
size : String, pages : Int32, id : String, encoded_path : String,
encoded_title : String, mtime : Time, err_msg : String?
@@ -46,31 +49,23 @@ class Entry
file.close
end

def to_slim_json : String
def build_json(*, slim = false)
JSON.build do |json|
json.object do
{% for str in ["zip_path", "title", "size", "id"] %}
json.field {{str}}, @{{str.id}}
{% end %}
json.field "title_id", @book.id
json.field "pages" { json.number @pages }
unless slim
json.field "display_name", @book.display_name @title
json.field "cover_url", cover_url
json.field "mtime" { json.number @mtime.to_unix }
end
end
end
end
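# Illustrative note (not part of the file): callers can use `entry.build_json`
# for the full object, or `entry.build_json(slim: true)` to omit the
# `display_name`, `cover_url`, and `mtime` fields.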

def to_json(json : JSON::Builder)
json.object do
{% for str in ["zip_path", "title", "size", "id"] %}
json.field {{str}}, @{{str.id}}
{% end %}
json.field "title_id", @book.id
json.field "display_name", @book.display_name @title
json.field "cover_url", cover_url
json.field "pages" { json.number @pages }
json.field "mtime" { json.number @mtime.to_unix }
end
end

def display_name
@book.display_name @title
end
@@ -81,9 +76,17 @@

def cover_url
return "#{Config.current.base_url}img/icon.png" if @err_msg

unless @book.entry_cover_url_cache
TitleInfo.new @book.dir do |info|
@book.entry_cover_url_cache = info.entry_cover_url
end
end
entry_cover_url = @book.entry_cover_url_cache

url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
TitleInfo.new @book.dir do |info|
info_url = info.entry_cover_url[@title]?
if entry_cover_url
info_url = entry_cover_url[@title]?
unless info_url.nil? || info_url.empty?
url = File.join Config.current.base_url, info_url
end
@@ -170,6 +173,16 @@ class Entry
# For backward compatibility with v0.1.0, we save entry titles
# instead of IDs in info.json
def save_progress(username, page)
LRUCache.invalidate "#{@book.id}:#{username}:progress_sum"
@book.parents.each do |parent|
LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
end
[false, true].each do |ascend|
sorted_entries_cache_key = SortedEntriesCacheEntry.gen_key @book.id,
username, @book.entries, SortOptions.new(SortMethod::Progress, ascend)
LRUCache.invalidate sorted_entries_cache_key
end

TitleInfo.new @book.dir do |info|
if info.progress[username]?.nil?
info.progress[username] = {@title => page}
