Skip to content

Commit

Permalink
Release 1.1.3
Browse files Browse the repository at this point in the history
  • Loading branch information
markasoftware committed Dec 3, 2020
2 parents 50e5a27 + b3b9181 commit 82721ed
Show file tree
Hide file tree
Showing 5 changed files with 184 additions and 52 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/offline.yml
@@ -0,0 +1,17 @@
name: Offline Checks
on: [push, pull_request]
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Shellcheck
uses: ludeeus/action-shellcheck@1.0.0
unit_test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Unit Tests
run: ./test.sh
173 changes: 130 additions & 43 deletions anypaste
Expand Up @@ -6,8 +6,8 @@

# shellcheck disable=2128

export ap_version ap_mac ap_path ap_human_name ap_mime ap_size ap_plugin ap_last_stdout
ap_version_text='Anypaste 1.1.2'
export ap_version ap_mac ap_path ap_human_name ap_human_name_escaped ap_mime ap_size ap_plugin ap_last_stdout
ap_version_text='Anypaste 1.1.3'
[[ $OSTYPE == darwin* ]] && ap_mac='true' || ap_mac='false'
ap_tmpdir=${TMPDIR:-/tmp}
shopt -s extglob
Expand All @@ -25,11 +25,11 @@ function check_size {
}

# Multipart form upload of the current file.
# @param $1 form field name the host expects
# @param $2 target URL
# Reads $ap_path and $ap_mime; sends the configured user agent ($ap_ua).
# -f makes curl exit non-zero on HTTP errors, which we surface as an upload failure.
function curl_form_upload {
	curl -#fF "$1=@\"$ap_path\";type=$ap_mime" -A "$ap_ua" "$2" || { echo "${ap_ERROR}ERROR: Upload request did not return HTTP 200!${ap_RESET}" >&2 && return 1; }
}

# Raw-body (non-multipart) upload of a file.
# @param $1 HTTP method (e.g. PUT)
# @param $2 local file path to upload
# @param $3 target URL
# Sends the configured user agent ($ap_ua); -f turns HTTP errors into failures.
function curl_file_upload {
	curl "-X$1" -#fT "$2" -A "$ap_ua" "$3" || { echo "${ap_ERROR}ERROR: Upload request did not return HTTP 200!${ap_RESET}" >&2 && return 1; }
}

# string json, string key -> string value
Expand All @@ -55,20 +55,28 @@ function json_parse {
echo "$infernal_agony"
}

# @param ALL env vars about a path
# @return $ap_find_extension_return the "ideal" file extension, will be the real extension
# if it has one, or otherwise will try to determine based on metadata
function ap_find_extension {
local format_name_line
# ffmpeg provides the best file extensions
if [[ -n $ap_ffprobe && $ap_ffprobe == *probe_score=100* ]]
then
format_name_line=$(grep 'format_name=' <<< "$ap_ffprobe")
ap_find_extension_return=${format_name_line##*=}
else
# attempt to use actual file extension, if it has one, otherwise null
[[ $ap_path == */+([^/]).+([^/]) ]] && ap_find_extension_return=${ap_path##*.} || ap_find_extension_return=
fi
# string -> url-encoded string
# param unencoded string
# @return $ap_url_encode_return the encoded string
# from https://stackoverflow.com/a/10660730
# string -> url-encoded string
# @param $1 the unencoded string
# @return $ap_url_encode_return the percent-encoded string
# Adapted from https://stackoverflow.com/a/10660730
ap_url_encode() {
	# Force byte-wise iteration: under a UTF-8 locale ${1:pos:1} yields whole
	# multibyte characters and "'$c" yields codepoints > 255, which %02x would
	# mangle. LC_ALL=C makes each "character" one byte, so UTF-8 filenames
	# encode to valid %XX%XX... sequences. 'local' restores the locale on return.
	local LC_ALL=C
	local strlen=${#1}
	local pos c o
	ap_url_encode_return=''

	for (( pos = 0; pos < strlen; pos++ ))
	do
		c=${1:$pos:1}
		case "$c" in
			# RFC 3986 unreserved characters pass through untouched
			[-_.~a-zA-Z0-9])
				o=$c
				;;
			*)
				# "'$c" makes printf treat the byte as its numeric value
				printf -v o '%%%02x' "'$c"
				;;
		esac
		ap_url_encode_return+=$o
	done
}

# @return exit code is 0 if input is a video (NOT a motion JPEG or animated GIF)
Expand Down Expand Up @@ -176,7 +184,8 @@ function transfersh {
check_size 10000000000
;;
upload)
transfersh_link=$(curl_file_upload 'PUT' "$ap_path" "https://transfer.sh/$ap_human_name") || return 1
transfersh_host=${transfersh_host:-https://transfersh.com}
transfersh_link=$(curl_file_upload 'PUT' "$ap_path" "$transfersh_host/$ap_human_name_escaped") || return 1
echo 'Reminder: transfer.sh uploads are deleted after 14 days!' >&2
echo 'Transfer.sh links are *not* direct if you use a browser, but they can be curled!' >&2;
echo >&2
Expand All @@ -190,37 +199,39 @@ function transfersh {
echo 'Generic-file host with a popular, official CLI client. No ads.'
echo '[tags]'
echo 'private'
echo '[config]'
echo 'optional|transfersh_host|Protocol, domain name, and host of a transfer.sh site (default: https://transfersh.com, since the original is down)'
;;
esac
}

function vgyme {
# Plugin for free.keep.sh: generic file host, 500MB limit, 24h retention.
# Follows the standard anypaste plugin protocol: $1 selects the action.
function keepsh {
	case $1 in
		check_eligibility)
			check_size 500000000
			;;
		upload)
			# keep.sh rejects anypaste's default user agent, so temporarily
			# spoof curl's and restore the original afterwards.
			local ap_ua_save=$ap_ua
			ap_ua='curl 7.64.0'
			keepsh_link=$(curl_file_upload PUT "$ap_path" "https://free.keep.sh") || return 1
			ap_ua=$ap_ua_save
			echo 'Reminder: keep.sh uploads are deleted after 24 hours!' >&2
			# Informational text goes to stderr like the lines around it, so that
			# captured stdout contains only the Link:/Direct: lines.
			echo "Keep.sh's direct links cannot be curled, but work elsewhere." >&2
			echo >&2
			echo "Link: $keepsh_link"
			echo "Direct: $keepsh_link/download"
			echo
			;;
		get_info)
			echo '[name]'
			echo 'keep.sh'
			echo '[description]'
			echo 'Generic file host that officially supports curl. Free edition.'
			echo '[tags]'
			echo 'private'
			echo 'direct'
			echo 'deletable'
			;;
	esac
}
Expand All @@ -245,6 +256,7 @@ function tinyimg {
echo '[tags]'
echo 'private'
echo 'direct'
echo 'permanent'
;;
esac
}
Expand All @@ -257,6 +269,7 @@ function imgur {
upload)
imgur_json=$(curl -#fF "image=@\"$ap_path\"" -F "name=\"$ap_human_name\"" \
-H "Authorization: Client-ID ${imgur_client_id:-c7e65b324a5ebe8}" \
-A "$ap_ua" \
'https://api.imgur.com/3/image') || return 1
imgur_json=${imgur_json//\\/}
imgur_id=$(json_parse "$imgur_json" 'id')
Expand Down Expand Up @@ -292,7 +305,7 @@ function clyp {
check_size 5000000
;;
upload)
clyp_json=$(curl -#fF "audioFile=@\"$ap_path\"" -H 'X-Client-Type: WebAlfa' \
clyp_json=$(curl -#fF "audioFile=@\"$ap_path\"" -H 'X-Client-Type: WebAlfa' -A "$ap_ua" \
https://upload.clyp.it/upload) || return 1
clyp_link=$(json_parse "$clyp_json" 'Url')
clyp_mp3_direct_link=$(json_parse "$clyp_json" 'SecureMp3Url')
Expand Down Expand Up @@ -320,7 +333,7 @@ function streamable {
;;
upload)
# shellcheck disable=2154
streamable_json=$(curl -#fu "$streamable_email:$streamable_password" -F "file=@$ap_path" https://api.streamable.com/upload) || return 1
streamable_json=$(curl -#fu "$streamable_email:$streamable_password" -F "file=@$ap_path" -A "$ap_ua" https://api.streamable.com/upload) || return 1
streamable_shortcode=$(json_parse "$streamable_json" 'shortcode')
echo >&2
echo "Link: https://streamable.com/$streamable_shortcode"
Expand Down Expand Up @@ -350,7 +363,7 @@ function sendvid {
# Sendvid won't upload videos with certain file extensions, but will still encode them
# and function properly if uploaded as a .mp4
# Maybe time for them to switch to mime type checking?
sendvid_json=$(curl -#fF "video=@$ap_path;filename=${ap_human_name%.*}.mp4" http://sendvid.com/api/v1/videos) || return 1
sendvid_json=$(curl -#fF "video=@\"$ap_path\";filename=${ap_human_name%.*}.mp4" -A "$ap_ua" https://sendvid.com/api/v1/videos) || return 1
sendvid_pub=$(json_parse "$sendvid_json" 'slug')
sendvid_priv=$(json_parse "$sendvid_json" 'secret')
echo >&2
Expand Down Expand Up @@ -385,7 +398,7 @@ function gfycat {

# this is one of the more complex ones
# get the key/name of the gfy
gfy_init_json=$(curl -sfXPOST https://api.gfycat.com/v1/gfycats) || {
gfy_init_json=$(curl -sfXPOST -A "$ap_ua" https://api.gfycat.com/v1/gfycats) || {
echo 'Getting gfycat key did not return HTTP 200!' >&2
return 1;
}
Expand All @@ -406,7 +419,7 @@ function gfycat {
while true
do
sleep 4
gfy_status=$(curl -fs "https://api.gfycat.com/v1/gfycats/fetch/status/$gfy_name") || {
gfy_status=$(curl -fs -A "$ap_ua" "https://api.gfycat.com/v1/gfycats/fetch/status/$gfy_name") || {
echo "${ap_ERROR}Status check request did not return HTTP 200!" >&2
echo "Your file might end up here anyway: https://gfycat.com/$gfy_name${ap_RESET}" >&2
return 1;
Expand Down Expand Up @@ -441,7 +454,7 @@ function docdroid {
;;
upload)
# shellcheck disable=2154
docdroid_json=$(curl -H "Authorization: Bearer $docdroid_access_token" -#fF "file=@$ap_path" https://docdroid.net/api/document)
docdroid_json=$(curl -H "Authorization: Bearer $docdroid_access_token" -#fF "file=@$ap_path" -A "$ap_ua" https://docdroid.net/api/document)
docdroid_id=$(json_parse "$docdroid_json" 'id')
docdroid_filename=$(json_parse "$docdroid_json" 'filename')
echo >&2
Expand All @@ -462,6 +475,76 @@ function docdroid {
esac
}

# Plugin for filemail.com: generic host, up to 50GB, parallel chunked upload.
# Follows the standard anypaste plugin protocol: $1 selects the action.
function filemail {
	case $1 in
		check_eligibility)
			# 50GB??? I'll believe it when I see it...
			check_size 50000000000
			;;
		upload)
			local id key transfer_global_url transfer_base_url init init_status chunk_count complete link direct get_data
			filemail_chunk_size=${filemail_chunk_size:-5242880}
			filemail_job_count=${filemail_job_count:-4}

			# Number of chunks = ceil(ap_size / chunk_size)
			chunk_count=$(( ap_size / filemail_chunk_size ))
			(( ap_size % filemail_chunk_size != 0 )) && (( chunk_count++ ))
			init=$(curl -sf --data 'sourcedetails=plupload(html5)+%40+https%3A%2F%2Fwww.filemail.com%2F&days=7&confirmation=true' -A "$ap_ua" 'https://www.filemail.com/api/transfer/initialize')
			init_status=$?
			# Any curl failure means we have no transfer id/key to work with, so
			# bail out here instead of only handling HTTP errors (exit code 22).
			if (( init_status != 0 ))
			then
				echo >&2
				echo "${ap_ERROR}Filemail initialization failed!" >&2
				if (( init_status == 22 ))
				then
					echo "More likely than not, you exceeded your daily limit of two uploads per IP.$ap_RESET" >&2
				else
					echo "curl exited with status $init_status.$ap_RESET" >&2
				fi
				echo >&2
				return 1
			fi
			id=$(json_parse "$init" 'transferid')
			key=$(json_parse "$init" 'transferkey')
			transfer_base_url=$(json_parse "$init" 'transferurl')
			# Other required fields are per-chunk and set in chunk loop
			transfer_global_url="$transfer_base_url?transferid=$id&transferkey=$key&runtime=html5&chunksize=$filemail_chunk_size&thefilename=$ap_human_name_escaped&totalsize=$ap_size&chunks=$chunk_count&retry=0"
			echo >&2
			# Fan out chunk uploads across $filemail_job_count worker shells.
			# Outer-shell values ($ap_tmpdir, $ap_path, $ap_ua, ...) are interpolated
			# into the worker script; \$-escaped names are evaluated inside the worker.
			# shellcheck disable=2154
			seq 0 $(( chunk_count - 1 )) | xargs -L1 -P "$filemail_job_count" bash -c "
				# we can't use ap_mktemp here because functions are ephemeral
				chunk_tmp_file=\$(mktemp -p '$ap_tmpdir' 'anypaste.XXXXXXXXXX')
				echo -ne \"\\rUploading chunk \$(( \$1 + 1 )) of $chunk_count.\" >&2
				# tail -c +K starts AT byte K (1-indexed), so skip chunk_size*i bytes
				# by starting at chunk_size*i+1; without the +1 every chunk after the
				# first would re-send the previous chunk's final byte.
				tail -c \"+\$(( $filemail_chunk_size * \$1 + 1 ))\" < '$ap_path' | head -c $filemail_chunk_size > \"\$chunk_tmp_file\"
				transfer_local_url='$transfer_global_url&chunk='\$1
				curl -sf --data-binary \"@\$chunk_tmp_file\" -H 'Content-Type: application/octet-stream' \"\$transfer_local_url\" -A \"$ap_ua\" || { echo Chunk upload failed!; exit 255; }
				# \$chunk_tmp_file must be escaped: it exists only in the worker shell
				rm -f \"\$chunk_tmp_file\"
			" filemail_upload_worker
			# shellcheck disable=2181
			if (( $? != 0 ))
			then
				echo 'Some chunk uploads failed.' >&2
				return 1
			fi
			echo >&2
			echo 'Finishing upload.' >&2
			complete=$(curl -sfXPOST --data "transferid=$id&transferkey=$key&failed=false" -A "$ap_ua" 'https://www.filemail.com/api/transfer/complete')
			link=$(json_parse "$complete" 'downloadurl')
			# The "get" endpoint returns the per-file download URL used as the direct link
			get_data=$(curl -sfXPOST --data "transferid=$id&skipreg=false&checkhashes=true&filesLimit=3000" -A "$ap_ua" "https://www.filemail.com/api/transfer/get")
			direct=$(json_parse "$get_data" 'downloadurl')
			echo >&2
			echo "Link: $link"
			echo "Direct: $direct"
			echo
			;;
		get_info)
			echo '[name]'
			echo 'Filemail'
			echo '[description]'
			echo 'A generic file host supporting up to 50GB files (!!). There is a two upload-per-IP-per-day limit, however, and all uploads are deleted after one week. This plugin supports parallel chunk uploading, just like the official HTML5 client.'
			echo '[tags]'
			echo 'private'
			echo 'direct'
			echo '[config]'
			echo 'optional|filemail_job_count|The maximum number of chunk uploads to perform in parallel. Defaults to 4.'
			echo 'optional|filemail_chunk_size|The maximum chunk size. Defaults to 5242880 and probably should not be changed.'
	esac
}

function dmca_gripe {
case $1 in
check_eligibility)
Expand Down Expand Up @@ -540,6 +623,7 @@ function ap_notify_hook {
# FUNCTIONS

# @param optionally, a file extension.
# If this ever gets changed, make the change inside the filemail plugin xargs too.
# Create a temporary file under $ap_tmpdir and print its path.
# @param optionally, a file extension.
# If this ever gets changed, make the change inside the filemail plugin xargs too.
function ap_mktemp {
	local name_template="anypaste.XXXXXXXXXX$1"
	mktemp -p "$ap_tmpdir" "$name_template"
}
Expand Down Expand Up @@ -962,6 +1046,8 @@ function ap_collect_file_metadata {
echo 'NOTE: Ffprobe/ffmpeg is not installed. Compatibility checks for audio/video may be inaccurate.' >&2
fi
ap_human_name=${ap_n:-$(basename "$ap_path")}
ap_url_encode "$ap_human_name"
ap_human_name_escaped=$ap_url_encode_return
ap_size=$(wc -c < "$ap_path")
}

Expand Down Expand Up @@ -1036,11 +1122,11 @@ function ap_create_config {
# You'll need to uncomment (remove # at beginning of line) first
# ap_plugins=(
# 'sendvid' 'streamable' 'gfycat' # Videos/Gifs
# 'tinyimg' 'imgur' 'vgyme' # Images
# 'tinyimg' 'imgur' # Images
# 'clyp' # Audio
# 'hastebin' 'ixio' # Text
# 'docdroid' # Documents
# 'dmca_gripe' 'transfersh' 'fileio' # Generic
# 'dmca_gripe' 'keepsh' 'transfersh' 'filemail' 'fileio' # Generic
# )
# Make sure to use export `boop=whatever` for plugin settings, not just `boop=whatever`
Expand Down Expand Up @@ -1233,12 +1319,13 @@ function ap_main_inner {
ap_help='false'
ap_version='false'
ap_plugins=(
'sendvid' 'streamable' 'gfycat' 'tinyimg' 'imgur' 'vgyme' 'clyp' 'hastebin' 'ixio' 'docdroid' 'dmca_gripe' 'transfersh' 'fileio'
'sendvid' 'streamable' 'gfycat' 'tinyimg' 'imgur' 'clyp' 'hastebin' 'ixio' 'docdroid' 'dmca_gripe' 'keepsh' 'transfersh' 'filemail' 'fileio'
)
ap_hooks=()
ap_hook_policy='lazy'
ap_unicode='true'
ap_color='true'
ap_ua=$ap_version_text

ap_parse_args
$ap_list && exec 2>/dev/null
Expand Down Expand Up @@ -1300,7 +1387,7 @@ function ap_main {
local exit_code
ap_main_inner "$@"
exit_code="$?"
rm -f "$ap_tmpdir/anypaste.*"
rm -f "$ap_tmpdir"/anypaste.*
return "$exit_code"
}

Expand Down
2 changes: 2 additions & 0 deletions fixtures/plugins/essentials
Expand Up @@ -5,7 +5,9 @@ function essentials {
echo 'essentials check_eligibility'
;;
upload)
# shellcheck disable=2154
echo "essentials upload $ap_human_name"
# shellcheck disable=2154
echo "essentials upload path $ap_path"
;;
get_info)
Expand Down

0 comments on commit 82721ed

Please sign in to comment.