diff --git a/README.md b/README.md
index 538b861..3296047 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,30 @@
 # hls_downloader
 
-A posix compliant highly fast and efficient Asynchronous stable m3u8 links dowloader that uses shell jobs for controlling parallel download...
+A POSIX-compliant, fast and efficient asynchronous downloader for m3u8 links that uses shell jobs to control parallel downloads...
 
-# Increase Parallel Downloads..
+```
+Usage:
+	hls [ -o <filename> ] [ -r | -f | -n <number> ] [ <m3u8 link> ]
+	hls -h
 
-Currently its set to my internet speed * 3 in [line 72](https://github.com/CoolnsX/hls_downloader/blob/main/hls#L72) in script
+Options:
+	-h	show helptext
+	-o	filename (default : video)
+	-r	select highest resolution automatically
+	-n	set maximum number of connections (default : 36)
+	-f	skip ffmpeg file conversion (used to enable the video file to run on any video player)
+```
+
+# Increase/Decrease Parallel Downloads..
+
+Currently it's set to 36 in [line 23](https://github.com/CoolnsX/hls_downloader/blob/main/hls#L23) of the script.
+You can increase or decrease it with ```-n <number>```
 
 ```
 Internet Speed = 12 MByte per seconds..
 36 (Internet Speed * 3)..
 ```
+NOTE :- Increasing the number makes the download faster but less stable, while decreasing it makes the download slower but more stable.. Decrease the number if the script hangs during the download process..
 
 # Dependency
 
diff --git a/hls b/hls
index 9df4d38..af386c7 100755
--- a/hls
+++ b/hls
@@ -7,25 +7,30 @@ help_text () {
 	printf "%s\n" "$line"
 	done <<-EOF
 	Usage:
-	${0##*/} [ -f <filename> ] [ -s ] [ <m3u8 link> ]
+	${0##*/} [ -o <filename> ] [ -r | -f | -n <number> ] [ <m3u8 link> ]
 	${0##*/} -h
 
 	Options:
 	-h	show helptext
-	-f	filename (default : video)
-	-s	select highest resolution automatically
+	-o	filename (default : video)
+	-r	select highest resolution automatically
+	-n	set maximum number of connections (default : 36)
+	-f	skip ffmpeg file conversion (used to enable the video file to run on any video player)
 	EOF
 }
 
 skip_res=0
+n=36
 file="video"
 tmpdir="${XDG_CACHE_HOME:-$HOME/.cache}/hls-temp"
 jobdir="${XDG_CACHE_HOME:-$HOME/.cache}/hls-jobs"
 
-while getopts 'fs' OPT; do
+while getopts 'o:rfhn:' OPT; do
 	case $OPT in
-		f) file=$OPTARG ;;
-		s) skip_res=1;;
+		o) file=$OPTARG ;;
+		n) n=$OPTARG ;;
+		f) skip_ffmpeg=1;;
+		r) skip_res=1;;
 		*|h)
 			help_text
 			exit 0
@@ -38,26 +43,32 @@ shift $((OPTIND - 1))
 trap "rm -rdf $tmpdir $jobdir;exit 0" INT HUP
 printf "\033[2K\r\033[1;36mFetching resolutions.."
 m3u8_data=$(curl -s "$link")
-res_list=$(printf "%s" "$m3u8_data" | sed -nE 's_.*RESOLUTION=.*x([^,]*),.*_\1_p')
+res_list=$(printf "%s" "$m3u8_data" | sed -nE 's_.*RESOLUTION=.*x([^,]*).*_\1_p')
 if [ -n "$res_list" ];then
-	highest_res=$(printf "$res_list" | sort -nr | head -1)
-	[ "$skip_res" -eq 1 ] && printf "\033[2K\r\033[1;36mSelecting highest resolution.." || (printf "\033[2K\r\033[1;33mRESOLUTIONS >>\n\033[0m$res_list\n\033[1;34mType ur preferred resolution (default: $highest_res) > " && read -r sel_res)
-	[ -z "$sel_res" ] && sel_res=$highest_res
-	unset highest_res res_list
-	url=$(printf "%s" "$m3u8_data" | sed -n "/$sel_res,/{n;p;}" | tr -d '\r')
-#check whether the m3u8_data contains uri that starts from http
-	printf "%s" "$m3u8_data" | grep -q "http" || relative_url=$(printf "%s" "$link" | sed 's_[^/]*$__')
-	printf "\033[2K\r\033[1;36mFetching Metadata.."
-	resp="$(curl -s "${relative_url}$url")"
+	highest_res=$(printf "%s" "$res_list" | sort -nr | head -1)
+	[ "$skip_res" -eq 1 ] && printf "\033[2K\r\033[1;36mSelecting highest resolution.." || { printf "\033[2K\r\033[1;33mRESOLUTIONS >>\n\033[0m$res_list\n\033[1;34mType your preferred resolution (default: $highest_res) > " && read -r sel_res; }
+	[ -z "$sel_res" ] && sel_res=$highest_res
+	unset highest_res res_list
+	url=$(printf "%s" "$m3u8_data" | sed -n "/x$sel_res/{n;p;}" | tr -d '\r')
+	#check whether the m3u8_data contains a uri that starts with http
+	printf "%s" "$m3u8_data" | grep -q "http" || relative_url=$(printf "%s" "$link" | sed 's_[^/]*$__')
+	printf "\033[2K\r\033[1;36mFetching Metadata.."
+	url="${relative_url}$url"
+	resp="$(curl -s "$url")"
 else
-	resp=$m3u8_data
+	url=$link
+	resp=$m3u8_data
 fi
 [ -d "$tmpdir" ] || mkdir -p "$tmpdir"
 #extract key uri and iv uri from encrypted stream if exists..
 key_uri="$(printf "%s" "$resp" | sed -nE 's/^#EXT-X-KEY.*URI="([^"]*)"/\1/p')"
 [ -z "$key_uri" ] || iv_uri="$(printf "%s" "$resp" | sed -nE 's/^#EXT-X-IV.*URI="([^"]*)"/\1/p')"
 data="$(printf "%s" "$resp" | sed '/#/d')"
-printf "%s" "$data" | grep -q "http" && relative_url='' || relative_url=$(printf "%s" "$link" | sed 's_[^/]*$__')
+if printf "%s" "$data" | grep -q "http";then
+	relative_url=''
+else
+	relative_url=$(printf "%s" "$url" | sed 's_[^/]*$__')
+fi
 range=$(printf "%s\n" "$data" | wc -l)
 
 #for encrypted stream only
@@ -73,7 +84,7 @@
 printf "\033[2K\r\033[1;35mpieces : $range\n\033[1;33mDownloading.."
 for i in $(seq $range); do
 	curl -s "${relative_url}$(printf "%s" "$data" | sed -n "${i}p")" > "$tmpdir/$(printf "%04d" "$i").ts" && printf "\033[2K\r\033[1;32m ✓ $i / $range done" &
 	jobs -p > "$jobdir"
-	while [ "$(cat "$jobdir" | wc -w)" -ge 36 ];do jobs > "$jobdir";sleep 0.05;done
+	while [ "$(wc -l < "$jobdir")" -ge "$n" ];do jobs > "$jobdir";sleep 0.05;done
 done
 wait
@@ -92,8 +103,8 @@
 fi
 rm -rdf $tmpdir $jobdir
 #conversion of allts file to mp4 video using ffmpeg..
 printf "\033[2K\r\033[1;36mEncoding file to mp4 video..\n\033[0m"
-ffmpeg -i "$file.ts" -loglevel error -stats -c copy "$file.mp4"
+[ -z "$skip_ffmpeg" ] && ffmpeg -i "$file.ts" -loglevel error -stats -c copy "$file.mp4" || mv "$file.ts" "$file.mp4"
 #cleanup..
-rm $file.ts
+rm "$file".ts
 printf "\033[2K\r\033[1;36m Done!!"
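For a quick illustration of how the reworked flags compose after this patch; the URL and output name below are placeholders, not taken from the repo:

```sh
# Auto-select the highest resolution (-r), cap parallel connections
# at 24 (-n), and write episode1.mp4 (-o). The m3u8 URL is a
# placeholder for illustration only.
hls -r -n 24 -o episode1 "https://example.com/stream/master.m3u8"

# Same download, but -f skips the ffmpeg remux: the merged .ts is
# simply renamed to .mp4.
hls -f -o episode1 "https://example.com/stream/master.m3u8"
```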
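The `res_list` change drops the trailing comma from the `RESOLUTION` pattern. The sketch below shows why, using an invented master playlist (not from any real stream): `RESOLUTION` can be the last attribute on an `#EXT-X-STREAM-INF` line, in which case the old pattern misses it.

```sh
# Invented master playlist; RESOLUTION is the last attribute on the
# first stream line, so it has no trailing comma.
m3u8_data='#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=1920x1080
1080p/index.m3u8
#EXT-X-STREAM-INF:RESOLUTION=1280x720,CODECS="avc1.64001f"
720p/index.m3u8'

# Old pattern: requires a comma after the height, so 1080 is skipped.
printf "%s" "$m3u8_data" | sed -nE 's_.*RESOLUTION=.*x([^,]*),.*_\1_p'   # prints 720
# New pattern: matches whether or not more attributes follow.
printf "%s" "$m3u8_data" | sed -nE 's_.*RESOLUTION=.*x([^,]*).*_\1_p'    # prints 1080 and 720
```

The matching `url=` line then anchors on `/x$sel_res/` and prints the following line, i.e. the variant playlist URI.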
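For readers unfamiliar with the `jobs`-based throttle that `-n` now controls, here is a minimal standalone sketch of the same pattern; the segment URLs, `jobfile`, and the loop bound are invented for the example:

```sh
#!/bin/sh
# Throttle background downloads with the shell's job table:
# spawn each curl in the background, then wait until the number
# of live jobs falls below the cap before spawning the next one.
n=4                      # max concurrent downloads (the script's -n)
jobfile=$(mktemp)

for i in $(seq 1 12); do
	# Placeholder segment URL; the real script takes URIs from the playlist.
	curl -s "https://example.com/seg$i.ts" -o "seg$i.ts" &
	jobs -p > "$jobfile"
	# jobs -p prints one PID per line, so wc -l counts running jobs.
	while [ "$(wc -l < "$jobfile")" -ge "$n" ]; do
		jobs > "$jobfile"    # refresh; finished jobs drop off the list
		sleep 0.05           # fractional sleep needs GNU/BSD sleep
	done
done
wait                     # let the final batch finish
rm -f "$jobfile"
```

Writing the job list to a file first, as the script does, avoids calling `jobs` inside a command substitution, where some shells report an empty job table because the substitution runs in a subshell.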