Skip to content
Permalink
Browse files

Add support for translations

  • Loading branch information...
omarroth committed Dec 20, 2018
1 parent 5b2b026 commit a160c645c9c6299553a7cc0c89e5182f5d1288bb

Large diffs are not rendered by default.

@@ -28,15 +28,15 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)

if refresh && Time.now - channel.updated > 10.minutes
channel = fetch_channel(id, client, db, pull_all_videos)
channel = fetch_channel(id, client, db, pull_all_videos: pull_all_videos)
channel_array = channel.to_a
args = arg_array(channel_array)

db.exec("INSERT INTO channels VALUES (#{args}) \
ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", channel_array)
end
else
channel = fetch_channel(id, client, db, pull_all_videos)
channel = fetch_channel(id, client, db, pull_all_videos: pull_all_videos)
channel_array = channel.to_a
args = arg_array(channel_array)

@@ -46,13 +46,13 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
return channel
end

def fetch_channel(ucid, client, db, pull_all_videos = true)
def fetch_channel(ucid, client, db, pull_all_videos = true, locale = nil)
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
rss = XML.parse_html(rss)

author = rss.xpath_node(%q(//feed/title))
if !author
raise "Deleted or invalid channel"
raise translate(locale, "Deleted or invalid channel")
end
author = author.content

@@ -223,7 +223,7 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
return url
end

def get_about_info(ucid)
def get_about_info(ucid, locale)
client = make_client(YT_URL)

about = client.get("/channel/#{ucid}/about?disable_polymer=1&gl=US&hl=en")
@@ -234,14 +234,14 @@ def get_about_info(ucid)
about = XML.parse_html(about.body)

if about.xpath_node(%q(//div[contains(@class, "channel-empty-message")]))
error_message = "This channel does not exist."
error_message = translate(locale, "This channel does not exist.")

raise error_message
end

if about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).try &.content.empty?
error_message = about.xpath_node(%q(//div[@class="yt-alert-content"])).try &.content.strip
error_message ||= "Could not get channel info."
error_message ||= translate(locale, "Could not get channel info.")

raise error_message
end
@@ -56,7 +56,7 @@ class RedditListing
})
end

def fetch_youtube_comments(id, continuation, proxies, format)
def fetch_youtube_comments(id, continuation, proxies, format, locale)
client = make_client(YT_URL)
html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
headers = HTTP::Headers.new
@@ -133,7 +133,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
response = JSON.parse(response.body)

if !response["response"]["continuationContents"]?
raise "Could not fetch comments"
raise translate(locale, "Could not fetch comments")
end

response = response["response"]["continuationContents"]
@@ -214,7 +214,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
json.field "content", content
json.field "contentHtml", content_html
json.field "published", published.to_unix
json.field "publishedText", "#{recode_date(published)} ago"
json.field "publishedText", translate(locale, "`x` ago", recode_date(published))
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]

@@ -250,7 +250,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)

if format == "html"
comments = JSON.parse(comments)
content_html = template_youtube_comments(comments)
content_html = template_youtube_comments(comments, locale)

comments = JSON.build do |json|
json.object do
@@ -296,7 +296,7 @@ def fetch_reddit_comments(id)
return comments, thread
end

def template_youtube_comments(comments)
def template_youtube_comments(comments, locale)
html = ""

root = comments["comments"].as_a
@@ -308,7 +308,7 @@ def template_youtube_comments(comments)
<div class="pure-u-23-24">
<p>
<a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
onclick="get_youtube_replies(this)">View #{child["replies"]["replyCount"]} replies</a>
onclick="get_youtube_replies(this)">#{translate(locale, "View `x` replies", child["replies"]["replyCount"].to_s)}</a>
</p>
</div>
</div>
@@ -328,7 +328,7 @@ def template_youtube_comments(comments)
<a href="#{child["authorUrl"]}">#{child["author"]}</a>
</b>
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{recode_date(Time.unix(child["published"].as_i64))} ago
#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64)))}
|
<i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
</p>
@@ -344,7 +344,7 @@ def template_youtube_comments(comments)
<div class="pure-u-1">
<p>
<a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
onclick="get_youtube_replies(this, true)">Load more</a>
onclick="get_youtube_replies(this, true)">#{translate(locale, "Load more")}</a>
</p>
</div>
</div>
@@ -354,7 +354,7 @@ def template_youtube_comments(comments)
return html
end

def template_reddit_comments(root)
def template_reddit_comments(root, locale)
html = ""
root.each do |child|
if child.data.is_a?(RedditComment)
@@ -366,15 +366,15 @@ def template_reddit_comments(root)
replies_html = ""
if child.replies.is_a?(RedditThing)
replies = child.replies.as(RedditThing)
replies_html = template_reddit_comments(replies.data.as(RedditListing).children)
replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
end

content = <<-END_HTML
<p>
<a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
<b><a href="https://www.reddit.com/user/#{author}">#{author}</a></b>
#{number_with_separator(score)} points
#{recode_date(child.created_utc)} ago
#{translate(locale, "`x` points", number_with_separator(score))}
#{translate(locale, "`x` ago", recode_date(child.created_utc))}
</p>
<div>
#{body_html}
@@ -0,0 +1,23 @@
# Loads the translation table for the given locale `name` from
# `locales/<name>.json` and returns its top-level JSON object as a
# Hash(String, JSON::Any), suitable for passing to `translate`.
# Raises if the file is missing or does not contain a JSON object.
def load_locale(name)
  contents = File.read("locales/#{name}.json")
  JSON.parse(contents).as_h
end

# Translates `translation` (an English source string that doubles as the
# lookup key) using `locale`, a table loaded by `load_locale`, or returns
# the key itself when no locale is given or the locale has no non-empty
# entry for it. When `text` is given, every occurrence of the literal
# "`x`" placeholder in the result is replaced by it.
#
# locale      : translation table, or nil for the English default
# translation : English string / lookup key, may contain the "`x`" placeholder
# text        : optional value substituted for the "`x`" placeholder
def translate(locale : Hash(String, JSON::Any) | Nil, translation : String, text : String | Nil = nil)
  # Use the locale's override only when it exists and is non-empty;
  # otherwise fall back to the English key.
  if locale && locale[translation]? && !locale[translation].as_s.empty?
    translation = locale[translation].as_s
  end

  # Substitute the placeholder even when no locale is loaded, otherwise
  # strings like "`x` ago" would leak the literal "`x`" to the user.
  if text
    translation = translation.gsub("`x`", text)
  end

  return translation
end
@@ -18,7 +18,7 @@ class Mix
})
end

def fetch_mix(rdid, video_id, cookies = nil)
def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
@@ -32,11 +32,11 @@ def fetch_mix(rdid, video_id, cookies = nil)
if yt_data
yt_data = JSON.parse(yt_data["data"].rchop(";"))
else
raise "Could not create mix."
raise translate(locale, "Could not create mix.")
end

if !yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
raise "Could not create mix."
raise translate(locale, "Could not create mix.")
end

playlist = yt_data["contents"]["twoColumnWatchNextResults"]["playlist"]["playlist"]
@@ -70,7 +70,7 @@ def fetch_mix(rdid, video_id, cookies = nil)
end

if !cookies
next_page = fetch_mix(rdid, videos[-1].id, response.cookies)
next_page = fetch_mix(rdid, videos[-1].id, response.cookies, locale)
videos += next_page.videos
end

@@ -26,7 +26,7 @@ class Playlist
})
end

def fetch_playlist_videos(plid, page, video_count, continuation = nil)
def fetch_playlist_videos(plid, page, video_count, continuation = nil, locale = nil)
client = make_client(YT_URL)

if continuation
@@ -48,7 +48,7 @@ def fetch_playlist_videos(plid, page, video_count, continuation = nil)
response = client.get(url)
response = JSON.parse(response.body)
if !response["content_html"]? || response["content_html"].as_s.empty?
raise "Playlist is empty"
raise translate(locale, "Playlist is empty")
end

document = XML.parse_html(response["content_html"].as_s)
@@ -105,14 +105,14 @@ def extract_playlist(plid, nodeset, index)
end

videos << PlaylistVideo.new(
title,
id,
author,
ucid,
length_seconds,
Time.now,
[plid],
index + offset,
title: title,
id: id,
author: author,
ucid: ucid,
length_seconds: length_seconds,
published: Time.now,
playlists: [plid],
index: index + offset,
)
end

@@ -155,7 +155,7 @@ def produce_playlist_url(id, index)
return url
end

def fetch_playlist(plid)
def fetch_playlist(plid, locale)
client = make_client(YT_URL)

if plid.starts_with? "UC"
@@ -164,7 +164,7 @@ def fetch_playlist(plid)

response = client.get("/playlist?list=#{plid}&hl=en&disable_polymer=1")
if response.status_code != 200
raise "Invalid playlist."
raise translate(locale, "Invalid playlist.")
end

body = response.body.gsub(%(
@@ -175,7 +175,7 @@ def fetch_playlist(plid)

title = document.xpath_node(%q(//h1[@class="pl-header-title"]))
if !title
raise "Playlist does not exist."
raise translate(locale, "Playlist does not exist.")
end
title = title.content.strip(" \n")

@@ -1,4 +1,4 @@
def fetch_trending(trending_type, proxies, region)
def fetch_trending(trending_type, proxies, region, locale)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
@@ -16,7 +16,7 @@ def fetch_trending(trending_type, proxies, region)
if yt_data
yt_data = JSON.parse(yt_data["data"].rchop(";"))
else
raise "Could not pull trending pages."
raise translate(locale, "Could not pull trending pages.")
end

tabs = yt_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a

0 comments on commit a160c64

Please sign in to comment.
You can’t perform that action at this time.