Skip to content

Commit

Permalink
2.8.042
Browse files Browse the repository at this point in the history
  • Loading branch information
chapmanjacobd committed May 24, 2024
1 parent 77370f7 commit 92ae0f1
Show file tree
Hide file tree
Showing 6 changed files with 20 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .github/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ To stop playing press Ctrl+C in either the terminal or mpv
<details><summary>List all subcommands</summary>

$ library
library (v2.8.041; 76 subcommands)
library (v2.8.042; 76 subcommands)

Create database subcommands:
╭───────────────┬──────────────────────────────────────────╮
Expand Down
2 changes: 1 addition & 1 deletion xklb/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "2.8.041"
__version__ = "2.8.042"
2 changes: 2 additions & 0 deletions xklb/playback/media_printer.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,8 @@ def media_printer(args, data, units=None, media_len=None) -> None:
printing.col_duration(media, k)
elif k.startswith("time_") or "_time_" in k:
printing.col_naturaltime(media, k)
elif k == "path" and not getattr(args, "no_url_decode", False):
printing.col_unquote_url(media, k)
elif k == "title_path":
media = [{"title_path": "\n".join(iterables.concat(d["title"], d["path"])), **d} for d in media]
media = [{k: v for k, v in d.items() if k not in ("title", "path")} for d in media]
Expand Down
5 changes: 4 additions & 1 deletion xklb/text/extract_links.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,8 @@ def print_or_download(args, a_ref):
if args.download:
web.download_url(link)
else:
if not args.no_url_decode:
link = web.url_decode(link).strip()
if args.print_link_text:
printing.pipe_print(f"{link}\t{link_text}")
else:
Expand All @@ -146,7 +148,8 @@ def extract_links() -> None:

if args.insert_only:
for url in arg_utils.gen_paths(args):
url = web.url_decode(url).strip()
if not args.no_url_decode:
url = web.url_decode(url).strip()
if args.download:
web.download_url(url)
else:
Expand Down
12 changes: 11 additions & 1 deletion xklb/utils/printing.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import humanize
from tabulate import tabulate

from xklb.utils import consts
from xklb.utils import consts, web


def print_overwrite(*text):
Expand Down Expand Up @@ -177,6 +177,16 @@ def col_duration(tbl: list[dict], col: str) -> list[dict]:
return tbl


def col_unquote_url(tbl: list[dict], col: str) -> list[dict]:
    """Percent-decode URL-like values in column *col* for display.

    Mutates each row of *tbl* in place and returns *tbl*, following the
    same convention as the other col_* helpers in this module
    (e.g. col_duration).

    Only string values that look like URLs (prefix "http") are decoded;
    None values and rows missing the column are left untouched.
    """
    # Iterate rows directly instead of by index — each row dict is
    # mutated in place, so no index bookkeeping is needed.
    for row in tbl:
        val = row.get(col)
        if val is not None and val.startswith("http"):
            row[col] = web.safe_unquote(val)

    return tbl


def wrap_paragraphs(text, width=80):
paragraphs = text.split("\n\n")
wrapped_paragraphs = [textwrap.fill(paragraph, width=width) for paragraph in paragraphs]
Expand Down
10 changes: 1 addition & 9 deletions xklb/utils/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -667,21 +667,13 @@ def construct_search(engine, s):


def construct_absolute_url(base_url, href):
href = safe_unquote(href)

up = urlparse(href)
if up.scheme and up.scheme not in ("https", "http", "ftp"):
return href

if not up.netloc:
href = urljoin(base_url, href)
href = urljoin(base_url + "/", href)

up = urlparse(href)
if up.netloc:
try:
href = href.replace(up.netloc, puny_decode(up.netloc), 1)
except Exception:
pass
return href


Expand Down

0 comments on commit 92ae0f1

Please sign in to comment.