author | Dominik George <nik@velocitux.com> | 2022-06-23 22:59:10 +0200
committer | Dominik George <nik@velocitux.com> | 2022-06-23 23:14:21 +0200
commit | 4f7de777b7bcf9b8fc15b203457b6c952be44769 (patch)
tree | 3b55f969ec8baf4d706b183b743a212a339bfa53
parent | 24a295c05d4bf38d8319ce1aa5a66fca261a54e0 (diff)
Implement file lists
File lists can now be used to define several sources to display.
This works similarly to the list feature of impressive, except that we
do not require an @ character; instead, all files ending in .txt are
treated as list files.
Lists are scanned recursively, the referenced files are downloaded, and
a new list is generated and passed to impressive as the final list.
On download, all files are compared so that a reload is triggered
whenever any file in the list changes.
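
For illustration, a list file could look like this (the URLs and file
names below are made up; lines starting with # are passed through as
comments, anything after the first space or # on a line is ignored, and
a referenced .txt file is expanded recursively):

    # hypothetical example list
    https://example.com/slides/welcome.pdf
    https://example.com/slides/photo.jpg        # a single image
    https://example.com/slides/more-slides.txt  # nested list, expanded recursively

The top-level list is then handed to impressive with a leading @,
matching impressive's own list syntax.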
-rwxr-xr-x | bin/impressive-display | 137
1 file changed, 92 insertions(+), 45 deletions(-)
diff --git a/bin/impressive-display b/bin/impressive-display
index d47195c..66412f3 100755
--- a/bin/impressive-display
+++ b/bin/impressive-display
@@ -39,7 +39,7 @@ VERSION="0.3.3"
 
 SUPPORTED_FILES=(pdf jpg jpeg png tif bmp ppm avi)
 if type -p mplayer 1>/dev/null; then
-    SUPPORTED_FILES+=(mov mp4 mkv webm ogv mpg mpeg ts flv)
+    SUPPORTED_FILES+=(mov mp4 mkv webm ogv mpg mpeg ts flv txt)
 fi
 
 # Can be configured at "/etc/default/impressive-display"
@@ -102,10 +102,7 @@ else
 fi
 
 workdir="$HOME/.impressive-display"
-mkdir -p "$workdir"
-pdffile_previous="$workdir/impressive-pdffile-previous"
-pdffile_downloaded="$workdir/impressive-pdffile-downloaded"
-pdffile_display="$workdir/impressive-pdffile-display"
+mkdir -p "$workdir/downloaded" "$workdir/display"
 
 session_lock="$workdir/.session-lock.$$"
 impressive_lock="$workdir/.impressive-lock"
@@ -174,11 +171,6 @@ function uri_is_supported {
 }
 
 uri_is_supported "$PDF_URI" || exit -1
-uri_stripped=${PDF_URI%.gz}
-extension=${uri_stripped##*.}
-pdffile_previous=$pdffile_previous.${extension}
-pdffile_downloaded=$pdffile_downloaded.${extension}
-pdffile_display=$pdffile_display.${extension}
 
 if [ -n "${RESOLUTION}" ]; then
     if type -p xrandr 1>/dev/null; then
@@ -218,13 +210,12 @@ function cleanup {
     for rmfile in $impressive_lock \
                   $session_lock \
-                  $pdffile_previous \
-                  $pdffile_downloaded \
-                  $pdffile_display \
+                  $workdir/downloaded/ \
+                  $workdir/display/ \
                   ; do
         if [ -e "$rmfile" ]; then
-            rm "$rmfile"
+            rm -r "$rmfile"
         fi
     done
@@ -262,17 +253,15 @@ function pdf_is_portrait {
 }
 
 function merge_portrait_documents {
+    local pdffile=$1
     if which pdfnup 1>/dev/null ; then
-        if pdf_is_portrait "$pdffile_downloaded"; then
-            cd $(dirname "$pdffile_downloaded") && pdfnup "$pdffile_downloaded" && cd - 1>/dev/null
-            mv "${pdffile_downloaded/.pdf/-nup.pdf}" "$pdffile_display"
-        else
-            cp "$pdffile_downloaded" "$pdffile_display"
+        if pdf_is_portrait "$pdffile"; then
+            cd $(dirname "$pdffile") && pdfnup "$pdffile" && cd - 1>/dev/null
+            mv "${pdffile/.pdf/-nup.pdf}" "$pdffile"
         fi
     else
        $OUTPUT "WARNING: The pdfnup tool is not installed."
-        cp "$pdffile_downloaded" "$pdffile_display"
     fi
 }
@@ -336,12 +325,17 @@ function is_sleep_hour {
 }
 
 function download_pdffile {
+    local pdf_uri=$1
+    local recursing=${2:-no}
-    if [ -r "$pdffile_downloaded" ]; then
-        cp "$pdffile_downloaded" "$pdffile_previous"
-    fi
-
-    DOWNLOADED_FILE="$workdir/$(basename $PDF_URI)"
+    local pdf_filename=$(basename "$pdf_uri")
+    local pdf_stripped=${pdf_filename%.gz}
+    local pdf_extension=${pdf_stripped##*.}
+
+    local pdffile_downloaded="$workdir/downloaded/$pdf_stripped"
+    local pdffile_target="$workdir/display/$pdf_stripped"
+
+    DOWNLOADED_FILE="$workdir/downloaded/$pdf_filename"
     if [ -f "$workdir/secrets/default.secret" ]; then
         NETRCFILE=" --netrc-file $workdir/secrets/default.secret"
     fi
@@ -350,44 +344,97 @@ function download_pdffile {
     fi
     if [ "x$LOG_TO_SYSLOG" = "xyes" ]; then
-        if ! curl $NETRCFILE "$PDF_URI" 1> "$DOWNLOADED_FILE" 2>/dev/null; then
+        if ! curl $NETRCFILE "$pdf_uri" 1> "$DOWNLOADED_FILE" 2>/dev/null; then
             $OUTPUT "The curl tool failed in retrieving PDF file. Run this tool interactively to debug problems."
         fi
     else
-        curl $NETRCFILE "$PDF_URI" 1> "$DOWNLOADED_FILE"
+        curl $NETRCFILE "$pdf_uri" 1> "$DOWNLOADED_FILE"
     fi
 
-    # is the PDF file gzipped?
+    # is the file gzipped?
     if echo "$DOWNLOADED_FILE" | grep -q -E '.*\.gz$'; then
         gunzip -f "$DOWNLOADED_FILE"
-        mv "${DOWNLOADED_FILE/.gz/}" "$pdffile_downloaded"
-    else
-        mv "${DOWNLOADED_FILE}" "$pdffile_downloaded"
     fi
 
-    if pdfinfo "$pdffile_downloaded" 1>/dev/null 2>/dev/null; then
+    # Treat special cases
+    if [[ $pdf_extension = txt ]]; then
+        # This is a list file - read it and recurse
+        while read -r line; do
+            if echo "$line" | grep -q -E '^#'; then
+                # Simply pass through comments
+                echo "$line"
+            else
+                next_uri=${line%%\#*}
+                next_uri=${next_uri%% *}
+
+                if uri_is_supported "$next_uri"; then
+                    # This will output the stripped filename / path
+                    download_pdffile "$next_uri" yes
+                fi
+            fi
+        done <"$pdffile_downloaded" >"$pdffile_downloaded.new"
+        mv "$pdffile_downloaded.new" "$pdffile_downloaded"
+    elif [[ $pdf_extension = pdf ]] && pdfinfo "$pdffile_downloaded" 1>/dev/null 2>/dev/null; then
+        # This is a PDF file - merge portrait documents
         merge_portrait_documents
+    fi
 
-        cmp -s "$pdffile_downloaded" "$pdffile_previous"> /dev/null
-        if [ $? -eq 1 ]; then
-            echo -n "RELOAD" > $impressive_lock
-            $OUTPUT "INFO: Triggering PDF file reload, PDF file has changed..."
-        fi
+    # If this is the top-level download call, compare the two directories
+    if [[ $recursing = no ]]; then
+        local reload=no
 
-    else
+        # Check for files that are in display/ but not in downloaded/
+        for file in "$workdir/"display/*; do
+            [[ $(basename "$file") = '*' ]] && break
 
-        $OUTPUT "WARNING: PDF file format problems. Maybe not a PDF file?"
-        cp "$pdffile_downloaded" "$pdffile_display"
+            if ! [[ -e "$workdir/downloaded/$(basename "$file")" ]]; then
+                # File is no longer desired, remove and trigger reload
+                rm "$file"
+                reload=yes
+            fi
+        done
+
+        # Compare files in downloaded/ to files in display/
+        for file in "$workdir/"downloaded/*; do
+            [[ $(basename "$file") = '*' ]] && break
+            cmp -s "$file" "$workdir/display/$(basename "$file")"> /dev/null
+            if [ $? -ne 0 ]; then
+                # File is new or changed, copy and trigger reload
+                mv "$file" "$workdir/display/$(basename "$file")"
+                reload=yes
+            fi
+        done
+
+        if [[ $reload = yes ]]; then
+            $OUTPUT "INFO: Triggering PDF file reload, set of files has changed..."
+            echo -n "RELOAD" > $impressive_lock
+        fi
     fi
 
-    pdffile=$pdffile_display
+    if [[ $recursing = yes ]]; then
+        # Is this a recursive list?
+        if [[ $pdf_extension = txt ]]; then
+            # Output list of all filenames so we get a concatenated final list
+            cat "$pdffile_target"
+        else
+            # Print only the single filename for the parent call to grab
+            echo "$pdffile_target"
+        fi
+    else
+        # Is this a list file? Prepend @
+        if [[ $pdf_extension = txt ]]; then
+            pdffile_display=@$pdffile_target
+        else
+            pdffile_display=$pdffile_target
+        fi
+    fi
 }
 
 function download_loop {
     # do an initial PDF retrieval..
-    download_pdffile
+    download_pdffile "$PDF_URI"
 
     if [ "x$DOWNLOAD_INTERVAL" != "x0" ]; then
         (
@@ -402,19 +449,19 @@ function download_loop {
                 if [ -n "$DEBUG_DOWNLOADS" ]; then
                     $OUTPUT "INFO: RUSH HOUR download, URL: $PDF_URI"
                 fi
-                download_pdffile
+                download_pdffile "$PDF_URI"
             elif is_sleep_hour 1>/dev/null && [ "x$SLEEP_HOURS_DOWNLOAD_INTERVAL" != "x0" ]; then
                 sleep $SLEEP_HOURS_DOWNLOAD_INTERVAL
                 if [ -n "$DEBUG_DOWNLOADS" ]; then
                     $OUTPUT "INFO: SLEEP HOUR download, URL: $PDF_URI"
                 fi
-                download_pdffile
+                download_pdffile "$PDF_URI"
             elif (! is_rush_hour 1>/dev/null ) && (! is_sleep_hour 1>/dev/null) && [ "x$DOWNLOAD_INTERVAL" != "x0" ]; then
                 sleep $DOWNLOAD_INTERVAL
                 if [ -n "$DEBUG_DOWNLOADS" ]; then
                     $OUTPUT "INFO: NORMAL HOUR download, URL: $PDF_URI"
                 fi
-                download_pdffile
+                download_pdffile "$PDF_URI"
             else
                 if [ -n "$DEBUG_DOWNLOADS" ]; then
                     $OUTPUT "DEBUG: Not doing any download."