2
0

Compare commits

..

14 Commits

Author SHA1 Message Date
531a7cf39e Undo the maintainer's change: we won't force the download of PDFs. 2025-06-25 11:35:31 +02:00
014d1bbc5c The maintainers of the Filebrowser changed the configuration files. 2025-06-22 17:44:35 +02:00
0211db1e08 110-startlogs.sh: better continuation of existing log file (even if empty). 2025-06-15 12:39:42 +02:00
8307b29649 The .gz extension has been added to the CLASSES_PATTERN in xport_backup.sh 2025-05-26 17:13:11 +02:00
25c504601b Wrong variable name (BASE_DIR instead of SERVICE_BASE) in a check. 2025-05-26 17:04:27 +02:00
7638a0ea2d Added the downsync utility (both Bash and Powershell) to retrieve exported backups. 2025-05-20 18:55:24 +02:00
a2c6a76956 Typo in service name conversion. 2025-05-20 18:44:51 +02:00
31572ffa08 Various bugfixes and minor enhancements
* typo in .templates/nginx/nginx_static.inc
* better ACL comment in .templates/nginx/nginx_xport.inc
* missing dependency and wrong order in storage_gitbackup (MediaWiki, WordPress)
* wrong order in tools/build
* pointless warnings in tools/backup.d/xport_backup.sh
* new info file introduced: services/.ports
2025-05-20 11:44:31 +02:00
449a5012a9 Minor improvements
* 110-startlogs.sh no longer repeats existing log lines.
* Added a missing ACL suggestion in nginx_xport.inc
2025-03-31 12:05:01 +02:00
7ab0d50abf rotate_logs now rotates non-empty logs even if the service isn't running. 2025-03-17 16:00:25 +01:00
8f962df620 staticweb_filebrowser recipe: includable nginx stub added to the staticweb 2025-03-06 19:12:42 +01:00
0342dfe641 Nginx enhancements - split configuration (nginx_*.inc stubs) 2025-03-06 19:03:08 +01:00
eb5ede75f0 Additional export backups feature. 2025-03-06 18:55:00 +01:00
46fc58210e Added --noconf option to the rotate_folder script. 2025-03-06 18:37:19 +01:00
28 changed files with 830 additions and 42 deletions

BIN
.metadata

Binary file not shown.

View File

@ -11,6 +11,10 @@
# #
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-05-21 v0.3.1
# fix: Wrong variable name (BASE_DIR instead of SERVICE_BASE) in a check.
# 2025-05-20 v0.3
# fix: Wrong output redirection order (>/dev/null 2>&1 was reversed).
# 2024-08-25 v0.2 # 2024-08-25 v0.2
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS. # new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
# mod: Doesn't do backup if the service is down. # mod: Doesn't do backup if the service is down.
@ -72,7 +76,7 @@ while [ -h "$SOURCE" ]; do
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #" done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
# Let's find which version of docker-compose is installed. # Let's find which version of docker-compose is installed.
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
# We'll use v2 if it is available. # We'll use v2 if it is available.
DOCKER_COMPOSE="$DOCKER" DOCKER_COMPOSE="$DOCKER"
commandstring="compose" commandstring="compose"
@ -132,7 +136,7 @@ fi
# The service must be running - silently gives up here if not. # The service must be running - silently gives up here if not.
# #
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \ [[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
&& exit 1 && exit 1
# Attempts the backup commit. # Attempts the backup commit.

View File

@ -0,0 +1,33 @@
# Includable nginx configuration.
#
# Additional Filebrowser service.
# Take a look to the proxy port setting (PAR_PROXYPORT is substituted
# by the templating step).
# NOTE(review): the regex location is unanchored, so it matches these
# path segments anywhere in the URI — confirm this is intended.
location ~ /(api|files|login|static) {
proxy_pass http://localhost:$PAR_PROXYPORT;
error_page 500 502 503 504 @proxy_error;
client_max_body_size 1G;
keepalive_timeout 30;
proxy_read_timeout 300;
proxy_request_buffering on;
proxy_buffers 2048 16k;
proxy_buffer_size 16k;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Server $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# This hack reverts a modification in Filebrowser (2.33.1):
# https://github.com/filebrowser/filebrowser/commit/8a14018861fe581672bbd27cdc3ae5691f70a108
# We don't force to download PDFs.
# NOTE(review): more_clear_headers is provided by the third-party
# ngx_http_headers_more module — nginx must be built/packaged with it.
more_clear_headers -t 'application/pdf' 'Content-Disposition';
# Websocket upgrade support for the Filebrowser UI.
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}

View File

@ -20,8 +20,12 @@ services:
volumes: volumes:
# The Linux user defined above must have R/W access here. # The Linux user defined above must have R/W access here.
- ./storage/volumes/staticweb:/srv - ./storage/volumes/staticweb:/srv
- ./storage/volumes/filebrowser_data/filebrowser.json:/.filebrowser.json # Proper mounts before 2.33.0 version (2025-06-18).
- ./storage/volumes/filebrowser_data/database.db:/.database.db #- ./storage/volumes/filebrowser_data/database.db:/.database.db
#- ./storage/volumes/filebrowser_data/filebrowser.json:/.filebrowser.json
# Proper mounts since 2.33.0 version (2025-06-18).
- ./storage/volumes/filebrowser_data/database.db:/database/filebrowser.db
- ./storage/volumes/filebrowser_data/filebrowser-new.json:/config/settings.json
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" - "host.docker.internal:host-gateway"
labels: labels:

View File

@ -0,0 +1,8 @@
{
"port": 80,
"baseURL": "/",
"address": "",
"log": "stdout",
"database": "/database/filebrowser.db",
"root": "/srv"
}

View File

@ -7,8 +7,11 @@
# #
# Call as a Docker manager user (member of the docker Linux group) via cron. # Call as a Docker manager user (member of the docker Linux group) via cron.
# #
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html # License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
# 2025-05-20 v0.3
# fix: a stupid typo in service name conversion.
# 2025-03-05 v0.2.1 # 2025-03-05 v0.2.1
# mod: reworded some comments and renamed a variable. # mod: reworded some comments and renamed a variable.
# 2024-08-25 v0.2 # 2024-08-25 v0.2
@ -112,7 +115,7 @@ BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
# Converts the service name to an actual running container's name. # Converts the service name to an actual running container's name.
# #
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandline ps -q "$SERVICENAME") | "$CUT" -c2-)" MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$SERVICENAME") | "$CUT" -c2-)"
# Gives up here if failed. # Gives up here if failed.
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi

View File

@ -11,6 +11,11 @@
# #
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-05-21 v0.3.1
# fix: Wrong variable name (BASE_DIR instead of SERVICE_BASE) in a check.
# 2025-05-20 v0.3
# fix: The docker itself was missing from the dependency list :(.
# fix: Wrong output redirection order (>/dev/null 2>&1 was reversed).
# 2024-08-25 v0.2 # 2024-08-25 v0.2
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS. # new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
# mod: Doesn't do backup if the service is down. # mod: Doesn't do backup if the service is down.
@ -50,7 +55,7 @@ YMLFILE="docker-compose.yml"
# #
TR=$(which tr 2>/dev/null) TR=$(which tr 2>/dev/null)
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
for item in cut date dirname git readlink for item in cut date dirname docker git readlink
do do
if [ -n "$(which $item)" ] if [ -n "$(which $item)" ]
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item) then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item)
@ -72,7 +77,7 @@ while [ -h "$SOURCE" ]; do
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #" done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
# Let's find which version of docker-compose is installed. # Let's find which version of docker-compose is installed.
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
# We'll use v2 if it is available. # We'll use v2 if it is available.
DOCKER_COMPOSE="$DOCKER" DOCKER_COMPOSE="$DOCKER"
commandstring="compose" commandstring="compose"
@ -132,7 +137,7 @@ fi
# The service must be running - silently gives up here if not. # The service must be running - silently gives up here if not.
# #
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \ [[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
&& exit 1 && exit 1
# Attempts the backup commit. # Attempts the backup commit.
@ -146,6 +151,7 @@ if [ ! -d "$GITDIR/.git" ]; then
fi fi
# Stages all the files and non-empty folders. # Stages all the files and non-empty folders.
"$GIT" --git-dir="$GITDIR/.git" --work-tree="$SOURCEDIR" add . >/dev/null "$GIT" --git-dir="$GITDIR/.git" --work-tree="$SOURCEDIR" add . >/dev/null
# Stores the file system metadata as well, if the tool has been installed. # Stores the file system metadata as well, if the tool has been installed.
if [ ! -z "$(which metastore)" -a -x "$(which metastore)" ]; then if [ ! -z "$(which metastore)" -a -x "$(which metastore)" ]; then
# This commamd silently creates the metastore file if it doesnt' exist yet. # This commamd silently creates the metastore file if it doesnt' exist yet.

7
.templates/.ports Normal file
View File

@ -0,0 +1,7 @@
# This is a human-readable summary of the port allocation.
# To be maintained manually. You can use it as you like,
# it doesn't matter for docker-skeleton automations.
8100 ACME for SSL certificates
8201

View File

@ -55,6 +55,9 @@
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# Kovács Zoltán <kovacsz@marcusconsulting.hu> # Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-03-06 v1.1
# new: added the --noconf option which prevents reading and creating
# the configuration file.
# 2023-06-18 v1.0 # 2023-06-18 v1.0
# new: forked from the "SMARTERP_skeleton" repository. # new: forked from the "SMARTERP_skeleton" repository.
# 2021.02.12 v0.3 # 2021.02.12 v0.3
@ -77,6 +80,7 @@ RETAIN_MONTHS=${RETAIN_MONTHS-"12"}
# Other initialisations (maybe overridden by configuration). # Other initialisations (maybe overridden by configuration).
# #
DOIT="" DOIT=""
NOCONF=""
# Messages (maybe overriden by configuration). # Messages (maybe overriden by configuration).
# #
@ -106,6 +110,7 @@ do
case ${option} in case ${option} in
"-" ) "-" )
if [ "$OPTARG" = "doit" ]; then DOIT="yes" if [ "$OPTARG" = "doit" ]; then DOIT="yes"
elif [ "$OPTARG" = "noconf" ]; then NOCONF="yes"
else echo "$MSG_BADOPT --$OPTARG" >&2; exit 1 else echo "$MSG_BADOPT --$OPTARG" >&2; exit 1
fi fi
;; ;;
@ -135,9 +140,10 @@ BACKUP_FOLDER=${BACKUP_FOLDER%/}
if [ -z "$BACKUP_FOLDER" -o ! -d "$BACKUP_FOLDER" -o ! -w "$BACKUP_FOLDER" ] if [ -z "$BACKUP_FOLDER" -o ! -d "$BACKUP_FOLDER" -o ! -w "$BACKUP_FOLDER" ]
then echo -e "$MSG_BADFOLDER $BACKUP_FOLDER" >&2; exit 1; fi then echo -e "$MSG_BADFOLDER $BACKUP_FOLDER" >&2; exit 1; fi
# Gets the configuration (if any). # Applies the configuration (if it exists and if it doesn't need to be ignored).
BACKUP_CONF="$BACKUP_FOLDER/.$("$BASENAME" "$0").conf" BACKUP_CONF="$BACKUP_FOLDER/.$("$BASENAME" "$0").conf"
if [ -r $BACKUP_CONF ]; then . "$BACKUP_CONF" if [ "$NOCONF" = "yes" ]; then :
elif [ -r $BACKUP_CONF ]; then . "$BACKUP_CONF"
else else
# Warns about failure. # Warns about failure.
echo -e "$MSG_NOCONF $BACKUP_CONF" echo -e "$MSG_NOCONF $BACKUP_CONF"

View File

@ -67,24 +67,15 @@ server {
You may report this at <a href='mailto:$server_admin'>$server_admin</a>.</span>"; You may report this at <a href='mailto:$server_admin'>$server_admin</a>.</span>";
} }
# #
# Static service. # Optional export backups function.
# location / { # Needs ACLs, see the include file.
# root $PAR_SERVICE/storage/volumes/staticweb/; #include $PAR_SERVICE/configs/nginx_xport.inc;
# allow all;
# autoindex off;
# index index.html index.htm;
# try_files $ri $uri/ =404;
# }
# #
# No static service, but we may provide the OPTIONS for a potential DAV client. # Optional simple static service.
# location / { #include $PAR_SERVICE/configs/nginx_static.inc;
# types { } default_type text/html; #
## dav_ext_methods OPTIONS; # Optional simple disabled-static servioe.
# if ($request_method != OPTIONS) { #include $PAR_SERVICE/configs/nginx_nostatic.inc;
# return 404 "<span style='font-size: x-large'>Sorry try <a href='$scheme://$server_name/$PAR_LOCATION'>$scheme://$server_name/$PAR_LOCATION</a> instead.</span>";
## return 302 $scheme://$server_name/$PAR_LOCATION;
# }
# }
################################################################################## ##################################################################################
# The SSL part # The SSL part

View File

@ -0,0 +1,11 @@
# Includable nginx configuration.
#
# A simple no-static service.
# Answers any non-OPTIONS request on / with a 404 page that points the
# visitor to the actual service location; OPTIONS falls through (e.g.
# for a potential DAV client).
location / {
types { } default_type text/html;
# dav_ext_methods OPTIONS;
if ($request_method != OPTIONS) {
return 404 "<span style='font-size: x-large'>Sorry try <a href='$scheme://$server_name/$PAR_LOCATION'>$scheme://$server_name/$PAR_LOCATION</a> instead.</span>";
# Alternatively, redirect instead of rejecting:
# return 302 $scheme://$server_name/$PAR_LOCATION;
}
}

View File

@ -0,0 +1,10 @@
# Includable nginx configuration.
#
# A simple static service.
# Serves files from the service's staticweb volume; no directory
# listing, unknown paths get a 404.
location / {
root $PAR_SERVICE/storage/volumes/staticweb/;
allow all;
autoindex off;
index index.html index.htm;
try_files $uri $uri/ =404;
}

View File

@ -0,0 +1,20 @@
# Includable nginx configuration.
#
# Export backups feature: a basic-auth protected autoindex listing of
# the exported backup files under /export.
# Needs
# setfacl -m u:www-data:r configs/xport_backup
# chmod a-x configs/xport_backup
# setfacl -m u:www-data:rx storage/backups
# setfacl -m u:www-data:rx storage/backups/export
# setfacl -d -m u:www-data:r storage/backups/export
# ACLs.
location /export {
root $PAR_SERVICE/storage/backups;
auth_basic "Export backups area";
auth_basic_user_file $PAR_SERVICE/configs/xport_backup;
allow all;
autoindex on;
autoindex_exact_size off;
autoindex_format html;
autoindex_localtime on;
}

229
.utils/downsync/downsync Executable file
View File

@ -0,0 +1,229 @@
#!/bin/bash
#
# A humble shell script for one-way (down) synchronization of a remote
# web folder (e.g. created by the xbackup utility). Performs the basic
# authentication if it is necessary.
#
# It does not handle any remote subfolders, only the root folder. Downloads
# all files that do not exist locally. Updates only an existing file that is
# older than the remote source. It warns of errors or possible inconsistencies.
#
# Actually it is only a pretty fatty wrapper to the wget :).
# Creates a unique log file in the local folder (this can be disabled).
#
# Usage: $0 [ -u remote_username ] [ -p base64_encoded_password ]
# [ -m max_tries ] [ -w wait_seconds ] [ -s small_size_warn ]
# [ --nolog ] [ --info | --verbose ]
# remote_URI [ local_folder ]
#
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-03-21 v0.1 Initial release.
# Messages.
#
MSG_BADEXIT="The worker finished with an exit code:"
MSG_BADOPT="Invalid option"
MSG_BADLOCAL="Must be an existing writable folder:"
MSG_BADPARAM="Doubtful parameter:"
MSG_BADPASS="Password must be base64-encoded:"
MSG_BADURI="Must be a valid http(s) address:"
# fix: this message is used by the dependency checks below but was
# never defined, so failures printed an empty prefix.
MSG_MISSINGDEP="Missing dependency:"
MSG_MISSINGLOCAL="Please specify the local folder."
MSG_MISSINGURI="Remote URI is mandatory."
MSG_SMALLFILES="List of the downloaded files shorter than"
MSG_USAGE="Usage: $0 [ -u remote_username ] [ -p base64_encoded_password ] "
MSG_USAGE+="[ -m max_tries ] [ -w wait_seconds ] [ -s small_size_warn ] "
MSG_USAGE+="[ --nolog ] [ --info | --verbose ] "
MSG_USAGE+=" remote_URI [ local_folder ]"
# Basic environment settings.
# C locale keeps tool output (dates, sorting) predictable.
#
LANG=C
LC_ALL=C
# Initialisations.
#
# LOGSTAMP is eval'd later, after $DATE has been resolved by the
# dependency check.
LOGSTAMP="\"\$DATE\" +%Y%m%d-%H%M%S" # Timestamp format for logfile
NEWFILEMINS=60 # A file younger than this is "new"
SHORTFILEEX="\(\.log\|\.tmp\)$" # Exceptions to short file checking (BRE)
#
# Fixed wget behaviour: one level deep, no index pages, resume and
# timestamp-based updates. Note the deliberate trailing spaces - the
# string is concatenated into a single command line later.
WGET_OPTIONS="-e robots=off --no-parent --no-directories "
WGET_OPTIONS+="--recursive --level=1 --exclude-directories='*' --reject index.htm* "
WGET_OPTIONS+="--timestamping --continue "
WGET_OPTIONS+="--no-verbose "
# Default parameters.
#
MAXTRIES=3 # On error it will try to download a file
# at most this many times.
NOLOG="" # If not empty, it will not write log file.
SMALLSIZE=1024 # Warns if the downloaded file isn't a log
# and is shorter than this value.
VERBOSE="" # If not empty, it will display log lines.
WAITSECS=5 # On error it will wait this many seconds between
# two download attempts.
# Gets the options (if any).
# Upper- and lowercase short options are accepted as equivalent; long
# options arrive through the getopts "-" trick (--opt shows up as
# option "-" with OPTARG "opt").
#
while getopts ":-:m:M:p:P:s:S:u:U:w:W:" option
do
case ${option} in
"-" )
if [ "$OPTARG" = "nolog" ]; then NOLOG="yes"
elif [ "$OPTARG" = "info" ]; then VERBOSE="yes"
elif [ "$OPTARG" = "verbose" ]; then VERBOSE="yes"
elif [ "$OPTARG" = "help" ]; then echo -e "$MSG_USAGE" >&2; exit
else echo "$MSG_BADOPT --$OPTARG" >&2; exit 1
fi
;;
"m" | "M" )
MAXTRIES="$OPTARG"
;;
"p" | "P" )
MYPASS="$OPTARG"
;;
"s" | "S" )
SMALLSIZE="$OPTARG"
;;
"u" | "U" )
MYUSER="$OPTARG"
;;
"w" | "W" )
WAITSECS="$OPTARG"
;;
\? )
echo "$MSG_BADOPT -$OPTARG" >&2; exit 1
;;
esac
done; shift $((OPTIND -1))
#
# All provided options were processed.
# Checks the dependencies.
# Each tool found is exported under its uppercased name, so e.g.
# wget is called later as "$WGET".
#
TR=$(which tr 2>/dev/null)
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
for item in base64 basename cat date find grep tee wget
do
if [ -n "$(which $item)" ]
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item)
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
done
#
# All dependencies are available via "$THECOMMAND" (upper case) call.
# Sanitizes the options.
# Some below are just arbitrary restrictions (reliable source: TODO!).
#
# MAXTRIES is a non-zero positive integer:
[[ -n "$MAXTRIES" ]] && [[ ! "$MAXTRIES" =~ ^[1-9][0-9]*$ ]] \
&& echo -e "$MSG_BADPARAM -m $MAXTRIES\n$MSG_USAGE" >&2 && exit 1
# MYUSER is empty or not too strange (whatever this means):
[[ -n "$MYUSER" ]] && [[ ! "$MYUSER" =~ ^([[:alnum:]]|[.-_\\+])*$ ]] \
&& echo -e "$MSG_BADPARAM -u $MYUSER\n$MSG_USAGE" >&2 && exit 1
# MYPASS is empty or Base64-encoded:
# (the regex is only a quick pre-filter; the decode probe below is the
# authoritative check)
if [ -n "$MYPASS" ]; then
[[ ! "$MYPASS" =~ ^[-A-Za-z0-9+/]*={0,3}$ ]] \
&& echo -e "$MSG_BADPASS -p $MYPASS\n$MSG_USAGE" >&2 && exit 1
# Tries to decode it.
echo "$MYPASS" | "$BASE64" --decode >/dev/null 2>&1
[[ $? -gt 0 ]] \
&& echo -e "$MSG_BADPASS -p $MYPASS\n$MSG_USAGE" >&2 && exit 1
# From here MYPASS holds the decoded (plain) password.
MYPASS=$(echo "$MYPASS" | "$BASE64" --decode)
fi
# SMALLSIZE is a non-zero positive integer:
[[ -n "$SMALLSIZE" ]] && [[ ! "$SMALLSIZE" =~ ^[1-9][0-9]*$ ]] \
&& echo -e "$MSG_BADPARAM -s $SMALLSIZE\n$MSG_USAGE" >&2 && exit 1
# WAITSECS is a non-negative integer (can be zero):
[[ -n "$WAITSECS" ]] && [[ ! "$WAITSECS" =~ ^[0-9][0-9]*$ ]] \
&& echo -e "$MSG_BADPARAM -w $WAITSECS\n$MSG_USAGE" >&2 && exit 1
#
# We checked the options at least minimally.
# Formally checks the remote URI provided.
#
# 1st non-option parameter is the remote URI.
if [ -z "$REMOTEURI" -a -n "$1" ]; then REMOTEURI="$1"; shift; fi
# It is mandatory.
[[ -z "$REMOTEURI" ]] \
&& echo -e "$MSG_MISSINGURI\n$MSG_USAGE" >&2 && exit 1
# Must be a valid http(s) address.
# NOTE(review): this regex is very permissive (the trailing .* accepts
# almost anything after the first host character) - consider tightening.
[[ ! "$REMOTEURI" =~ ^https?://([[:alnum:]]|[.-])/?.*$ ]] \
&& echo -e "$MSG_BADURI $REMOTEURI" >&2 && exit 1
# Adds a trailing slash.
REMOTEURI="${REMOTEURI%/}/"
#
# We checked the remote URI at least minimally.
# Determines the download directory.
#
# 2nd non-option parameter is the local folder's pathname.
if [ -z "$LOCALDIR" -a -n "$1" ]; then LOCALDIR="$1"; shift; fi
# Defaults to the current folder.
[[ -z "$LOCALDIR" ]] && LOCALDIR="$PWD"
[[ -z "$LOCALDIR" ]] && LOCALDIR="$($(which pwd))"
# This should not happen... Gives it up.
[[ -z "$LOCALDIR" ]] \
&& echo -e "$MSG_MISSINGLOCAL" >&2 && exit 1
# Must be a writable folder.
if [ ! -d "$LOCALDIR" -o ! -w "$LOCALDIR" ]; then
echo -e "$MSG_BADLOCAL $LOCALDIR" >&2; exit 1; fi
# Removes the trailing slash (if any).
LOCALDIR="${LOCALDIR%/}"
#
# We've a suitable download directory.
# Tries to retrieve only newer files from the remote URL.
#
# Composes the credentials (if any).
WGET_CREDENTIALS=""
[[ -n "$MYUSER" ]] && WGET_CREDENTIALS="--http-user=$MYUSER --http-password=$MYPASS "
#
# Figures out how we should do the logging.
# With --nolog everything goes to /dev/null; otherwise a unique,
# timestamped log file is created in the local folder.
[[ -n "$NOLOG" ]] \
&& LOGFILE="/dev/null" \
|| LOGFILE="$LOCALDIR/$("$BASENAME" "$0")_$(eval $LOGSTAMP).log"
#
# Calls parametrized wget as a worker.
# wget writes its report to stderr, hence the 2> redirections below.
if [ -n "$VERBOSE" ]; then
# We also need to write to the console (tee duplicates stderr).
"$WGET" $WGET_OPTIONS $WGET_CREDENTIALS --waitretry=$WAITSECS --tries=$MAXTRIES \
--directory-prefix="$LOCALDIR" "$REMOTEURI" \
>/dev/null 2> >("$TEE" "$LOGFILE" >&2); excode=$?
else
# We don't write to the console.
"$WGET" $WGET_OPTIONS $WGET_CREDENTIALS --waitretry=$WAITSECS --tries=$MAXTRIES \
--directory-prefix="$LOCALDIR" "$REMOTEURI" \
>/dev/null 2>"$LOGFILE"; excode=$?
fi
#
# Checks the exit code, warns if non-zero.
# (bare "excode" is evaluated as a variable in the [[ ]] arithmetic test)
if [[ excode -ne 0 ]]; then
# Displays the log file even if we called it to be quiet.
if [ -z "$VERBOSE" -a -z "$NOLOG" ]; then
"$CAT" "$LOGFILE" 2>/dev/null
fi
# Shows/appends the warning.
echo -e "\n$MSG_BADEXIT $excode" >&2
fi
#
# We tried to synchronize, we did what we could.
# Checks the files that are currently being downloaded,
# and warns if there are any short files among them.
# Files with extensions in SHORTFILEEX and the current
# logfile are excluded from this check.
#
SMALLFILES=$( \
"$FIND" "$LOCALDIR" -type f \
! -wholename "$LOGFILE" \
-newermt "- $NEWFILEMINS minutes" \
-size -${SMALLSIZE}c 2>/dev/null | \
"$GREP" -iv -e "$SHORTFILEEX"
)
# Warns if there are small files even if we called it to be quiet.
[[ -n "$SMALLFILES" ]] \
&& echo -e "\n$MSG_SMALLFILES $SMALLSIZE:\n$SMALLFILES" >&2
# That's all, Folks! :)

View File

@ -0,0 +1,233 @@
<#
.SYNOPSIS
Powershell script for one-way (down) synchronization of a remote web folder.
.DESCRIPTION
It does not handle any remote subfolders, only the root folder. Downloads
all files that do not exist locally. Updates only an existing file that is
older than the remote source. It warns of errors or possible inconsistencies.
Creates a unique log file in the local folder (this can be disabled).
Usage: $PSCommandPath -Remote URI_to_be_synced [-Local local_folder]
[-User username] [-Pass base64_encoded_password]
[-NoLog] [-Info]
Author: Zoltán KOVÁCS <kovacsz@marcusconsulting.hu>
License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.html
.NOTES
Changelog:
2025-03-12 v0.1 Initial release.
#>
# Command line parameters.
# The int parameters default to 0 when omitted; their real defaults
# (3 / 5 / 1024) are applied in the initialisation block below.
#
param (
# An http(s) URI pointing to a remote web folder containing the files to synchronize.
[Parameter()][string]$Remote,
# An existing and writable local folder where the script will download the files.
[Parameter()][string]$Local = $PSScriptRoot,
# Credentials, if required by the remote website.
[Parameter()][string]$User,
# A base64-encoded password (if necessary).
[Parameter()][string]$Pass,
# On error the script will try to download a file at most this many times. Defaults to 3 tries.
[Parameter()][int]$MaxTries,
# On error the script will wait this many seconds between two download attempts. Defaults to 5 seconds.
[Parameter()][int]$WaitRetry,
# The script warns if the downloaded file is shorter than this value. Defaults to 1024 bytes.
[Parameter()][int]$SmallSize,
# If set, the script will not write log file.
[Parameter()][switch]$NoLog = $false,
# If set, the script will display log lines.
[Parameter()][switch]$Info = $false
)
# Initialisations.
#
if (-not $MaxTries) { $MaxTries = 3 }
if (-not $SmallSize) { $SmallSize = 1024 }
if (-not $WaitRetry) { $WaitRetry = 5 }
# Messages.
# Every user-facing text lives in this table; Write-Log and the error
# branches below look the entries up by key.
#
$Message = @{}
$Message['Bad DNS'] = 'Remote host is not an IP and not resolvable.'
$Message['Bad folder'] = "The local path must point to a writable folder."
$Message['Bad URI'] = 'Remote parameter must be a valid http(s) URI.'
$Message['Collision array'] = "The local path is an existing array:"
$Message['Downloaded'] = "Downloaded file:"
$Message['Empty filelist'] = "List of files is empty:"
$Message['Finished'] = "Synchronisation finished."
$Message['Is a folder'] = "Remote subfolders are ignored:"
$Message['Local newer'] = "The files are different but the local one is newer:"
$Message['Size mismatch'] = "Size of the downloaded file differ:"
$Message['Started'] = "Sychronisation started."
$Message['Unable fetchdir'] = "Unable to fetch the content of the remote folder."
$Message['Unable to decode'] = 'Password must be properly base64 encoded.'
# fix: this key is used in the download retry loop's catch branch but
# was missing from the table, so those failures logged an empty message.
$Message['Unable to download'] = 'Unable to download the remote file:'
$Message['Unable to stat remote'] = 'Unable to stat the remote object:'
$Message['Small file'] = "File is smaller than " + $SmallSize + " bytes:"
$Message['Usage'] = "Usage:`n" + `
$PSCommandPath + ' -Remote URI_to_be_synced [-Local local_folder] ' + `
'[-User username] [-Pass base64_encoded_password] ' + `
'[-NoLog True] [-Info True]'
# Logger function.
# Appends a timestamped line to $LogFilePath (unless -NoLog was given)
# and echoes it to the console when -Info is set. Uses $args, so all
# positional arguments are joined into the logged line.
#
function Write-Log {
$date = Get-Date -Format "yyyy-MM-dd HH:mm:ss.fff"
if ( -not($NoLog)) { Add-Content -Path $LogFilePath -Value "$date $args" }
if ($Info) { Write-Host $args }
}
# Checks the -Remote parameter.
#
# It is mandatory.
if ( -not("$Remote")) { Write-Host $Message['Usage']; exit 1 }
# The closing / is necessary.
$Remote = $Remote.TrimEnd('/') + '/'
# Must be well-formed and http(s).
if ( -not([uri]::IsWellFormedUriString("$Remote", 'Absolute')) -or -not(([uri] "$Remote").Scheme -in 'http', 'https')) {
Write-Host $Message['Bad URI']; exit 1 }
# Must be IPv4 or resolvable.
if ( -not(([uri]"$Remote").Host -match "^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$" -and [bool](([uri]"$Remote").Host -as [ipaddress]))) {
# It is resolvable?
try { Resolve-DnsName -Name ([uri]"$Remote").Host -ErrorAction Stop | Out-Null }
catch { Write-Host $Message['Bad DNS']; exit 1 }
}
#
# We've a somewhat checked remote.
# Checks the -Local parameter.
#
# Must be an existing, writable folder.
if ( -not("$Local")) { $Local = $PSScriptRoot }
if ( -not(Test-Path -LiteralPath "$Local" -pathType Container)) { Write-Host $Message['Bad folder']; exit 1 }
# Can we write into? (probed with a throwaway GUID-named temp file)
try {
$testfile = $Local + '\' + [guid]::NewGuid() + '.tmp'
[io.file]::OpenWrite("$testfile").close()
Remove-Item -ErrorAction SilentlyContinue "$testfile" }
catch { Write-Host $Message['Bad folder']; exit 1 }
#
# We've a somewhat checked local folder.
# Decodes the provided -Pass (if any).
#
if ("$Pass") {
try { $Pass = ([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String($Pass))) }
catch { Write-Host $Message['Unable to decode']; exit 1 }
}
#
# We've a decoded (or empty) password.
# Initializes the log file.
# Named <scriptname>-yyyyMMdd-HHmmss.log inside the local folder.
#
$LogFilePath = $Local + '\' + (Get-Item $PSCommandPath ).Basename + (Get-Date -Format "-yyyyMMdd-HHmmss") +'.log'
Write-Log $Message['Started']
#
# We've the log file ready to use.
# Prepares the Authorization header from provided credentials (if any).
#
$Headers = ''
if ("$User" ) {
$encoded = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("$($User):$($Pass)"))
$Headers = @{ Authorization = "Basic $encoded" }
}
# We've an Authorization header ready to use for Webrequests.
# Let's get the directory index from the remote source.
#
$response = ''
try {
$ProgressPreference = 'SilentlyContinue'
if ("$Headers") {$response = (Invoke-WebRequest -Uri "$Remote" -Headers $Headers -UseBasicParsing) }
else {$response = (Invoke-WebRequest -Uri "$Remote" -UseBasicParsing ) }
}
catch { Write-Log $Message['Unable fetchdir'] "$Remote" $_.Exception.Response.StatusCode.Value__ $_.Exception.Response.StatusDescription; exit 1 }
# NOTE(review): -skip 1 drops the first link - presumably the
# parent-directory entry of the autoindex page; confirm against the
# actual server's index format.
$files = @($response.Links.HREF | select -skip 1)
#
# We send a warning if it is empty.
#
if ($files.Count -eq 0) { Write-Log $Message['Empty filelist'] "$Remote" }
#
# We've the list of remote files.
# Processes the remote files in a row, one after the other.
#
foreach ($file in $files) {
#
# Let's get the parameters of the remote object (via a HEAD request).
# On error we send a warning and move on.
#
$remoteHeaders = ''
try {
$ProgressPreference = 'SilentlyContinue'
if ("$Headers") { $remoteHeaders = (Invoke-WebRequest -Uri ("$Remote" + "$file") -Headers $Headers -Method Head -UseBasicParsing ).Headers }
else { $remoteHeaders = (Invoke-WebRequest -Uri ("$Remote" + "$file") -Method Head -UseBasicParsing).Headers }
}
catch { Write-Log $Message['Unable to stat remote'] ("$Remote" + "$file") $_.Exception.Message; continue }
$remoteDate = $remoteHeaders['Last-Modified']
$remoteSize = $remoteHeaders['Content-Length']
$remoteType = $remoteHeaders['Content-Type']
#
# If the remote object is a folder we send a warning and move on.
#
if ("$remoteType" -eq 'text/directory') { Write-Log $Message['Is a folder'] ("$Remote" + "$file"); continue }
#
# If we've a local object and it is a folder we send a warning and move on.
#
if (Test-Path -LiteralPath "$Local\$file" -PathType Container) { Write-Log $Message['Collision array'] "$Local\$file"; continue }
#
# We've an existing local file?
#
if (Test-Path -LiteralPath "$Local\$file" -PathType Leaf) {
$localDate = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).LastWriteTime.DateTime
$localSize = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).Length
#
# If the local file is newer than remote we don't replace it, but we send a warning if the sizes are different.
#
if ((Get-Date $localDate) -gt (Get-Date $remoteDate)) {
if ( $localSize -ne $remoteSize ) { Write-Log $Message['Local newer'] $file }
continue
}
}
#
# OK, we decided to download the remote file.
# On failure, we'll try again a few times.
#
for ($i = 1; $i -le $MaxTries; $i++) {
try {
$ProgressPreference = 'SilentlyContinue'
if ("$Headers") { Invoke-WebRequest -Uri ("$Remote" + "$file") -Headers $Headers -OutFile ($Local + '\' + $file) }
else { Invoke-WebRequest -Uri ("$Remote" + "$file") -OutFile ($Local + '\' + $file) }
#
Write-Log $Message['Downloaded'] ("$Remote" + "$file")
#
# Checks the size of the downloaded file, stops trying if it is OK.
#
$localSize = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).Length
if ( $localSize -eq $remoteSize ) {
#
# We send a warning on small files (except the logs).
#
if ($localSize -lt $SmallSize -and (Get-Item ("$Local" + "\" + "$file")).Extension -notin ('.log')) {
Write-Log $Message['Small file'] ("$Local" + "\" + "$file") }
break
}
#
Write-Log $Message['Size mismatch'] $Local\$file $localSize $remoteSize
}
# NOTE(review): make sure the 'Unable to download' key exists in the
# $Message table, otherwise this branch logs an empty message.
catch { Write-Log $Message['Unable to download'] ("$Remote" + "$file") $_.Exception.Message }
#
# Waits before retrying.
#
Start-Sleep -Seconds $WaitRetry
}
}
#
# That's all.
#
Write-Log $Message['Finished']

View File

@ -0,0 +1,17 @@
#!/bin/bash
#
# This script retrieves daily backups from our remote web services.
# Contains (encoded) passwords, keep it confidential!
# Maintained by hand.
# NOTE: this is a template - replace every <angle-bracketed>
# placeholder before use; it is not runnable as-is.
# A download step
$HOME/bin/downsync \
-u <https user> -p "<base64-encoded https password>" \
'https://<remote website>/export' \
'<download destination pathname>'
# More download steps (if any)
# Rotates all backup folders.
# The actual schedule is defined per folder in the .rotate_folder.conf files.
$HOME/bin/rotatebackups "<downloads destination root folder>"

47
.utils/downsync/rotatebackups Executable file
View File

@ -0,0 +1,47 @@
#!/bin/bash
#
# Backup folders maintenance operation planned to run once a day.
# This script is usually called by cron (indirectly).
# Rotates every child folder of BACKUPSROOT which contains a prepared
# .rotate_folder.conf file, using the rotate_folder utility - looked up
# next to this script first, then on the PATH.
#
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-05-20 v0.1 Initial release

# Will maintain child subfolders of this directory.
# Input parameter (if any) will be sanitized later (via readlink -e).
BACKUPSROOT="${1:-$PWD}"

# Checks the components; exits silently on a missing tool (cron context).
command -v dirname  >/dev/null || exit 1
command -v readlink >/dev/null || exit 1

# Where am I? Physical path of the folder containing this script.
SCRPATH="$( cd -P "$( dirname "$0" )" && echo "$PWD" )"

# Rotates the backup folders.
#
# Enumerates the (non-hidden) child entries and tries to rotate their
# content. The glob - unlike parsing ls output - is safe for names
# containing whitespace; an unmatched glob yields a non-existent path
# which readlink -e rejects below.
for entry in "$BACKUPSROOT"/*; do
  # Dereferenced absolute path (empty if the entry doesn't exist).
  folder="$(readlink -e "$entry")"
  # Is it a folder with a prepared configuration?
  [ -d "$folder" ] && [ -r "$folder/.rotate_folder.conf" ] || continue
  # Does the rotate job; prefers the sibling copy of the utility.
  if [ -x "$SCRPATH/rotate_folder" ]; then
    "$SCRPATH/rotate_folder" -f "$folder" >/dev/null
  elif command -v rotate_folder >/dev/null; then
    "$(command -v rotate_folder)" -f "$folder" >/dev/null
  fi
done
#
# Done with rotating.
# That's all, Folks :)

1
configs/.gitignore vendored
View File

@ -1,4 +1,5 @@
# Ignore everything else in this directory. # Ignore everything else in this directory.
* *
!certs !certs
!xport_backup
!.gitignore !.gitignore

3
configs/xport_backup Normal file
View File

@ -0,0 +1,3 @@
# Credentials file for exported backups feature.
# Needs username:apr1-hashed password entries, one per line.
# Use https://htpasswd.utils.com/ or a similar tool to fill it in.

5
storage/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory except these folders.
*
!.gitignore
!backups
!volumes

5
storage/backups/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory except these files.
*
!.gitignore
!export
!tarballs

4
storage/backups/export/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory except these files.
*
!.gitignore
!.rotate_folder.conf

View File

@ -0,0 +1,9 @@
# This is a shell script excerpt for configuration purposes only.
# Handle with care! Please don't put code here, only variables.
#
# Regex grouping the rotatable files: capture 1 is the class name,
# capture 2 the recognized backup extension.
# NOTE(review): unlike the xport_backup.sh default, this pattern does
# not match a bare .gz extension - confirm that this is intended here.
CLASSES_PATTERN="^([^.]*)\..*\.$HOSTNAME\.(dmp|sql\.gz|tgz|log)$"
DOIT="yes" # if empty the script makes a dry run
RETAIN_DAYS=7 # retains all files created within that many days
RETAIN_WEEKS=0 # retains one file per week/month,
RETAIN_MONTHS=0 # created within that many weeks/months

View File

@ -0,0 +1,109 @@
#!/bin/bash
#
# Optional additional backup operation, intended to export an (almost)
# up-to-date downloadable copy for our customers about their data
# handled by us. The script synchronizes some of the existing backup
# files to an export folder that can be downloaded from the web.
#
# Uses the rotate_folder tool to select files to synchronize.
# This tool must be somewhere in the path.
#
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-05-26 v0.3
# mod: The .gz extension has been added to the CLASSES_PATTERN.
# 2025-04-02 v0.2
# fix: Omits all warnings about missing source folder(s).
# 2025-03-06 v0.1 Initial release

# Accepted environment variables and their defaults.
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
PAR_DUMPDIR=${PAR_DUMPDIR:-""} # Absolute path to DB dumps
PAR_EXPORTDIR=${PAR_EXPORTDIR:-""} # Absolute path to export dir
PAR_RETAINDAYS=${PAR_RETAINDAYS:-"1"} # Days to retain the copies
PAR_TARBALLDIR=${PAR_TARBALLDIR:-""} # Absolute path to tgz dumps

# Other initialisations.
# Regex selecting the exportable backup files: capture 1 is the class
# name, capture 2 one of the recognized backup extensions.
CLASSES_PATTERN="^([^.]*)\..*\.$HOSTNAME\.(dmp|sql\.gz|gz|tgz|log)$"
DUMPPATH="storage/backups/dumps" # Default path to DB dumps
EXPORTPATH="storage/backups/export" # Default path to export dir
TARBALLPATH="storage/backups/tarballs" # Default path to tgz dumps
# fix: the fallback previously assigned the literal string "LOGNAME"
# (the '$' was missing from the parameter expansion).
USER=${USER:-$LOGNAME} # Fix for cron environment only
YMLFILE="docker-compose.yml"

# Messages.
MSG_MISSINGDEP="Fatal: missing dependency"
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
MSG_NONWRITE="The target directory isn't writable"

# Checks the dependencies.
TR=$(which tr 2>/dev/null)
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
# Exports an uppercase variable (CP, CUT, ... ROTATE_FOLDER) holding the
# full path of every required tool; bails out on the first one missing.
for item in cp cut date dirname grep hostname readlink rotate_folder tar
do
  if [ -n "$(which $item)" ]
  then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
  else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
done

# Where I'm?
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
SOURCE="$0"
while [ -h "$SOURCE" ]; do
  # resolve $SOURCE until the file is no longer a symlink
  SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && pwd )" #"
  SOURCE="$("$READLINK" "$SOURCE")"
  # if $SOURCE was a relative symlink, we need to resolve it
  # relative to the path where the symlink file was located
  [[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && pwd )" #"

# Searches the base folder, containing a docker-compose.yml file.
# Called from the base folder (./)?
BASE_DIR="$PAR_BASEDIR"
TEST_DIR="$SCRPATH"
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
# Called from ./tools?
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
# Called from ./tools/*.d?
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
# On failure gives it up here.
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
  echo "$MSG_MISSINGYML" >&2; exit 1
fi

# Sets the absolute paths.
DUMPDIR="${PAR_DUMPDIR:-$BASE_DIR/$DUMPPATH}"
EXPORTDIR="${PAR_EXPORTDIR:-$BASE_DIR/$EXPORTPATH}"
TARBALLDIR="${PAR_TARBALLDIR:-$BASE_DIR/$TARBALLPATH}"
# Exits silently if EXPORTDIR isn't present - the export feature is
# opt-in: creating the export folder enables it.
[[ ! -e "$EXPORTDIR" ]] && exit 0
# EXPORTDIR must be writable.
# fix: the error message previously reported the undefined $BACKUPDIR.
[[ ! -w "$EXPORTDIR" ]] \
&& echo "$MSG_NONWRITE: $EXPORTDIR" >&2 && exit 1

# Let's select and copy the appropriate backup files.
#
# We'll call rotate_folder (dry run) with CLASSES_PATTERN and PAR_RETAINDAYS
# set above to select relevant files created in the backup folders within last
# PAR_RETAINDAYS days. These files are synchronized with the cp -u statement.
#
# Enumerates the folders.
for folder in "$DUMPDIR" "$TARBALLDIR"
do
  # Selects the appropriate files (which have the "DR" - daily retain - tag).
  # The explicit "$( (" spacing disambiguates the command substitution
  # holding a subshell from an arithmetic expansion "$((".
  # Note: the unquoted substitution relies on word splitting, so file
  # names containing whitespace are not supported here.
  for filename in $( (export CLASSES_PATTERN="$CLASSES_PATTERN" \
    RETAIN_DAYS="$PAR_RETAINDAYS" RETAIN_WEEKS=0 RETAIN_MONTHS=0; \
    "$ROTATE_FOLDER" --noconf -f "$folder" 2>/dev/null) | \
    "$GREP" '^DR ' | "$CUT" -d' ' -f2) ""
  do
    # Updates the current file (cp -u copies only when the source is
    # newer than the existing export copy, or no copy exists yet).
    if [ -n "$filename" ]; then
      "$CP" -u "$folder/$filename" "$EXPORTDIR/" 2>/dev/null
    fi
  done
done
# That's all, Folks! :)

View File

@ -8,6 +8,8 @@
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# Kovács Zoltán <kovacsz@marcusconsulting.hu> # Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-03-21 v1.2
# fix: wrong output redirection order in docker compose.
# 2024-08-24 v1.1 # 2024-08-24 v1.1
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS. # new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
# 2023-06-18 v1.0 # 2023-06-18 v1.0
@ -39,7 +41,7 @@ done
# #
# Let's find which version of docker-compose is installed. # Let's find which version of docker-compose is installed.
commandstring="" commandstring=""
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
# We'll use v2 if it is available. # We'll use v2 if it is available.
DOCKER_COMPOSE="$DOCKER" DOCKER_COMPOSE="$DOCKER"
commandstring="compose" commandstring="compose"

View File

@ -1,6 +1,6 @@
#!/bin/bash #!/bin/bash
PAR_SERVICENAME= PAR_SERVICENAME=""
PAR_PROXYHOST="localhost" PAR_PROXYHOST="localhost"
PAR_PROXYPORT="8201" PAR_PROXYPORT="8201"
PAR_SERVERNAME="myservice.example.com" PAR_SERVERNAME="myservice.example.com"
@ -18,8 +18,13 @@ PAR_SERVICE="$HOME/services/$PAR_SERVICENAME"
PARAMETERS='$PAR_ACMEHOST:$PAR_ACMEPORT:$PAR_SERVICE:$PAR_PROXYHOST:$PAR_PROXYPORT:$PAR_SERVERNAME:$PAR_LOCATION:$PAR_WEBMASTER' PARAMETERS='$PAR_ACMEHOST:$PAR_ACMEPORT:$PAR_SERVICE:$PAR_PROXYHOST:$PAR_PROXYPORT:$PAR_SERVERNAME:$PAR_LOCATION:$PAR_WEBMASTER'
for parameter in $(echo "$PARAMETERS" | tr ":" "\n") for parameter in $(echo "$PARAMETERS" | tr ":" "\n")
do export ${parameter:1}; done do export ${parameter:1}; done
cat "$PAR_SERVICE/.templates/nginx/nginx.conf" | envsubst "$PARAMETERS" \ for template in $(cd "$PAR_SERVICE/.templates/nginx/"; ls -1 nginx*) ""
> "$PAR_SERVICE/configs/nginx.conf" do
if [ -n "$template" ]; then
cat "$PAR_SERVICE/.templates/nginx/$template" | envsubst "$PARAMETERS" \
> "$PAR_SERVICE/configs/$template"
fi
done
touch $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log touch $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log
chmod 660 $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log chmod 660 $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log

View File

@ -10,6 +10,8 @@
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu> # Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
# Kovács Zoltán <kovacsz@marcusconsulting.hu> # Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-03-17 v1.2
# mod: rotates non-empty logs even if the service isn't running.
# 2024-08-24 v1.1 # 2024-08-24 v1.1
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS. # new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
# 2023-06-18 v1.0 # 2023-06-18 v1.0
@ -98,10 +100,6 @@ CONFFILE="$CONFDIR/.${SCRFILE%.*}.conf"
STATEFILE="$CONFDIR/.${SCRFILE%.*}.state" STATEFILE="$CONFDIR/.${SCRFILE%.*}.state"
LOGDIR="${PAR_LOGDIR:-$BASE_DIR/$LOGDIR}" LOGDIR="${PAR_LOGDIR:-$BASE_DIR/$LOGDIR}"
# Doesn't rotate logs for stopped services.
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
&& exit 0
# Locates the worker script. # Locates the worker script.
WORKERSCRIPT="$SCRPATH/copytruncate" WORKERSCRIPT="$SCRPATH/copytruncate"
[[ ! -x "$WORKERSCRIPT" ]] && WORKERSCRIPT="$(which copytruncate)" [[ ! -x "$WORKERSCRIPT" ]] && WORKERSCRIPT="$(which copytruncate)"
@ -116,6 +114,7 @@ $LOGDIR/*.log {
missingok missingok
daily daily
rotate 30 rotate 30
notifempty
# Must be consistent with prerotate script's settings! # Must be consistent with prerotate script's settings!
dateext dateext
dateyesterday dateyesterday
@ -143,6 +142,7 @@ $LOGDIR/web/*.log {
missingok missingok
daily daily
rotate 60 rotate 60
notifempty
# Must be consistent with prerotate script's settings! # Must be consistent with prerotate script's settings!
dateext dateext
dateyesterday dateyesterday

View File

@ -6,7 +6,11 @@
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu> # Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html) # License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# #
# 2025-02-26 v0.1 # 2025-06-01 v0.4
# fix: better continuation of existing log file (even if empty).
# 2025-03-29 v0.3
# mod: no longer repeats existing log lines.
# 2025-02-26 v0.2
# fix: a silly typo (commandtring) blocked the startup. # fix: a silly typo (commandtring) blocked the startup.
# 2025-02-02 v0.1 Initial release # 2025-02-02 v0.1 Initial release
@ -27,7 +31,7 @@ MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
# Checks the dependencies. # Checks the dependencies.
TR=$(which tr 2>/dev/null) TR=$(which tr 2>/dev/null)
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
for item in cut dirname docker grep ps readlink for item in cut date dirname docker grep ps readlink tail
do do
if [ -n "$(which $item)" ] if [ -n "$(which $item)" ]
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item) then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
@ -93,9 +97,21 @@ else
if [ -n "$service" ]; then if [ -n "$service" ]; then
# Converts the service's name to an actual running container's name. # Converts the service's name to an actual running container's name.
container="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$service") | "$CUT" -c2-)" container="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$service") | "$CUT" -c2-)"
# It will not start a new log if it already exists. # It will not start the log process if it already exists.
if [ -z "$("$PS" auxww | "$GREP" "$DOCKER" | "$GREP" 'logs' | "$GREP" "$container" )" ]; then if [ -z "$("$PS" auxww | "$GREP" "$DOCKER" | "$GREP" 'logs' | "$GREP" "$container" )" ]; then
"$DOCKER" logs -t -f $container >> "$BASE_DIR/$LOGDIR/$service.log" 2>&1 & # Determines the last existing log line in the log file (if any).
logline="$("$TAIL" -n1 "$BASE_DIR/$LOGDIR/$service.log" 2>/dev/null)"
# Gets the timestamp from this line.
[[ -n "$logline" ]] \
&& timestamp="$(echo "$logline" | "$CUT" -d' ' -f1 2>/dev/null)" \
|| timestamp="invalid"
# If the log does not contain a valid last timestamp, we write log lines
# created since the last time the container was started.
[[ $("$DATE" -d "$timestamp" >/dev/null 2>&1; echo $?) -eq 0 ]] \
&& since="$timestamp" \
|| since="$("$DOCKER" inspect -f '{{ .State.StartedAt }}' $container)"
# Only logs the new lines (actually repeats the last one - TODO!).
"$DOCKER" logs -t --since "$since" -f $container >> "$BASE_DIR/$LOGDIR/$service.log" 2>&1 &
fi fi
fi fi
done done