update
johnnysteen committed Aug 14, 2021
1 parent 3b7c432 commit 6fdf568
Showing 4 changed files with 70 additions and 36 deletions.
config: 3 changes (2 additions, 1 deletion)
@@ -1,4 +1,5 @@
 URL='192.168.1.170'
-uptime=6 #update interval in hours
+uptime=24 #update interval in hours
 documentroot=/Library/WebServer/Documents/
+thumburl=""
 numdl=3
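
The jump from uptime=6 to uptime=24 stretches the update pass from every 6 hours to once a day, since run-updates.sh converts the value to seconds (let interval=3600*$uptime). A quick sanity check, assuming config is sourced as-is:

    . config
    let interval=3600*$uptime
    echo $interval    # 86400 seconds with uptime=24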
run-updates.sh: 17 changes (11 additions, 6 deletions)
@@ -12,24 +12,29 @@ feedname=''
 while :
 do
     clear
+    which youtube-dl
+    youtube-dl --version
     . config
     let interval=3600*$uptime
     echo 'The time is ' `date '+%Y.%m.%d %H:%M:%S'`

-    if [ "$feedname" = '' ]; then
-        youtube-dl -U
+    if [ "$argstr" = '' ]; then
+        #youtube-dl -U
         while read LINE
         do
             echo "Checking feed '"$LINE"' for updates..."
-            bash update.sh $LINE 1
+            bash update.sh $LINE -n 1 2> /dev/null
         done < feeds
     else
-        echo "Checking feed '"$feedname"' for updates (downloading $numdls videos)..."
-        bash update.sh $feedname $numdls
+        #echo "Checking feed '"$feedname"' for updates (downloading $numdls videos)..."
+        #bash update.sh $feedname -n $numdls
+        echo "bash update.sh $argstr"
+        bash update.sh $argstr 2> /dev/null
     fi

     numdls=1
     feedname=''
+    argstr=''
     hr=10#$(expr $(date +%H) % $uptime)
     let mins=10#$(date +%M)+60*$hr
     let secs=10#$(date +%S)+60*$mins
@@ -40,7 +45,7 @@ do
     echo 'Finished updating at ' `date '+%Y.%m.%d %H:%M:%S'`
     echo 'Will return in' $sleephrs 'hrs' $sleepmins 'mins' $sleepsecs 'secs.'
     echo 'Hit ENTER to update now or type FEEDNAME NUMDLS'
-    read -t $sleeptime feedname numdls
+    read -t $sleeptime argstr #feedname numdls
 done
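
A note on the argstr change above: read now captures the whole typed line in a single variable instead of splitting it into feedname and numdls, and the loop forwards it verbatim to update.sh, so any flag update.sh understands can be typed at the prompt. A sketch, with an illustrative feed name:

    # typed at the 'Hit ENTER to update now' prompt:
    somefeed -n 3
    # the loop then runs:
    bash update.sh somefeed -n 3 2> /dev/null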


setup.sh: 18 changes (15 additions, 3 deletions)
@@ -1,14 +1,26 @@
 #!/bin/bash

+if [ $# -ne 1 ]; then
+    echo 'illegal number of arguments'
+    exit 0
+else
+    feedname=$1
+fi
+
 . config
-echo $feedname >> feeds
-cd $documentroot

-feedname=$1
 echo "Initializing feed $feedname".

 feeddir=$documentroot/$feedname

 mkdir $feeddir
+if [ $? -ne 0 ] ; then
+    echo "mkdir failed: try a different feedname or run with sudo"
+    exit 0
+fi
+
+echo $feedname >> feeds
+cd $documentroot
+
 echo -n "Enter feed title: "
 read feedtitle
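
The reshuffle in setup.sh front-loads the failure paths: the argument count is checked before anything runs, and the feed name is appended to the feeds file only after mkdir succeeds, so a failed setup no longer leaves a stale entry behind. Typical invocation under the new flow, with an illustrative feed name:

    sudo bash setup.sh somefeed
    # on failure: 'mkdir failed: try a different feedname or run with sudo'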
update.sh: 68 changes (42 additions, 26 deletions)
@@ -1,57 +1,72 @@
 #!/bin/bash
 echo '**********' `date` '**********' 1>&2
 echo $PATH 1>&2
+PATH=$PATH:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
+cd /Users/jcost/youtube-to-rss/

 . config

 cd $documentroot/$1
 . params

-if [ -n "$2" ] && [ "$2" -eq "$2" ] 2>/dev/null; then
-    numdl=$2
-else
-    numdl=1
-fi
-
-youtube-dl --datebefore `date -v-1d +%Y%m%d` --restrict-filenames --write-info-json -xciw --yes-playlist --max-downloads $numdl --download-archive archive $channelurl > log.out
+OPTIND=2
+while getopts ":n:v:" arg; do
+    case $arg in
+        n)
+            numdl=${OPTARG}
+            ;;
+        v)
+            channelurl=${OPTARG}
+            ;;
+    esac
+done
+
+#if [ -n "$2" ] && [ "$2" -eq "$2" ] 2>/dev/null; then
+#    numdl=$2
+#else
+#    numdl=1
+#fi
+
+#youtube-dl --datebefore `date -v-1d +%Y%m%d` --restrict-filenames --write-info-json -xciw --yes-playlist --max-downloads $numdl --download-archive archive $channelurl > log.out
+youtube-dl --cookies /Users/jcost/youtube-to-rss/youtube.com_cookies.txt --id --abort-on-unavailable-fragment --restrict-filenames --write-info-json -xci --yes-playlist --max-downloads $numdl --download-archive archive $channelurl > log.out

-grep -i '\[download\] destination' log.out > log2.out
-
-while read LINE
+for finfojson in *.info.json;
 do
-    oldfile=${LINE#*: } #Long F@#$ title!-1234567890A.webm
-    filebase=${oldfile%.*} #Long F@#$ title!-1234567890A
+    if [ -f $finfojson ]; then
+    filebase=${finfojson%%.*}

-    echo 'New episode: ' $filebase

-    vidurl=`cat "$filebase.info.json" | python -c "import sys, json; print(json.load(sys.stdin)['webpage_url'])"`
-    pubdate=`cat "$filebase.info.json" | python -c "import sys, json; print(json.load(sys.stdin)['upload_date'])"`
+    title=`cat "$finfojson" | python -c "import sys, json; print(json.load(sys.stdin)['title'])" | sed 's/\&/\&amp\;/g;s/\</\&lt\;/g;s/\>/\&gt\;/g'`
+    vidurl=`cat "$finfojson" | python -c "import sys, json; print(json.load(sys.stdin)['webpage_url'])"`
+    pubdate=`cat "$finfojson" | python -c "import sys, json; print(json.load(sys.stdin)['upload_date'])"`
     pubstring=`date -jf '%Y%m%d' '+%a, %d %b %Y %H:%M:%S %z' $pubdate`
-    description=`cat "$filebase.info.json" | python -c "import sys, json; print(json.load(sys.stdin)['description'].encode('utf-8'))" | sed 's/\&/\&amp\;/g;s/\</\&lt\;/g;s/\>/\&gt\;/g'`
+    description=`cat "$finfojson" | python -c "import sys, json; print(json.load(sys.stdin)['description'].encode('utf-8'))" | sed 's/\&/\&amp\;/g;s/\</\&lt\;/g;s/\>/\&gt\;/g'`

+    echo 'New episode: ' $title
+    echo 'Published ' "$pubstring"
+
-    rm -- "$filebase.info.json"
+    rm -- "$finfojson"

     list_of_files=( "$filebase".* )
-    oldfile="${list_of_files[0]}" #Long F@#$ title!-1234567890A.mp4
-    fileext=${oldfile##*.} #mp4
-    newfile=${filebase:(-11)}.$fileext #1234567890A.mp4
-    length=`ffmpeg -i "$oldfile" 2>&1 | grep Duration | awk -F: '{print 3600 * $2 + 60*$3 + $4 }'`

-    mv "$oldfile" $newfile
-    oldfile=$(echo $oldfile | sed 's/\&/\&amp\;/g;s/\</\&lt\;/g;s/\>/\&gt\;/g')
+    oldfile="${list_of_files[0]}"
+    newfile=$oldfile
+
+    length=`ffmpeg -i "$oldfile" 2>&1 | grep Duration | awk -F: '{print 3600 * $2 + 60*$3 + $4 }'`

     mv feed.body feed.oldbody
     cat << EOF >> feed.body
 <item>
-<title>${oldfile%-$newfile}</title>
+<title>${title}</title>
 <pubdate>$pubstring</pubdate>
-<guid>${filebase:(-11)}</guid>
+<guid>${filebase}</guid>
 <enclosure url="http://$URL/$feedname/$newfile" type="audio/mpeg" length="$length"></enclosure>
 <description>
-${oldfile%-$newfile}
+${title}
 Original video: $vidurl
 Downloaded: `date`
 $description
 </description>
@@ -60,7 +75,8 @@ EOF

     cat feed.oldbody >> feed.body
     rm feed.oldbody
-done < log2.out
+    fi
+done

 cat << EOF > feed.newrss
 <?xml version="1.0" encoding="UTF-8"?>
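
With the getopts block above, update.sh grows a small flag interface: the feed name stays as the first positional argument (it drives cd $documentroot/$1), and OPTIND=2 starts option parsing at the second argument, where -n caps the number of downloads and -v overrides the channelurl sourced from params. A sketch of the new call shape, with placeholder values:

    bash update.sh somefeed -n 2
    bash update.sh somefeed -n 1 -v 'https://www.youtube.com/c/SomeChannel'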
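Because the new youtube-dl invocation passes --id, downloaded files are already named by the bare video id, which is why the old rename step (fileext/newfile/mv) collapses to newfile=$oldfile and the guid drops the ${filebase:(-11)} suffix slicing. A minimal illustration of the parameter expansions involved, using a made-up id:

    finfojson='dQw4w9WgXcQ.info.json'
    filebase=${finfojson%%.*}           # dQw4w9WgXcQ (strip from the first dot)
    list_of_files=( "$filebase".* )     # e.g. dQw4w9WgXcQ.m4a once the .info.json is removed
    oldfile=${list_of_files[0]}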
