Compare commits: 39c0f07fca ... master (20 commits)
@ -1,6 +1,5 @@
|
||||
# MediaWiki with MariaDB (optionally with extensions).
|
||||
#
|
||||
#version: '3'
|
||||
services:
|
||||
# https://hub.docker.com/_/mediawiki
|
||||
mediawiki:
|
||||
@ -17,10 +16,9 @@ services:
|
||||
# Needs R/W UID:GID 33:33 (www-data:www-data).
|
||||
- ./storage/volumes/mediawiki_images:/var/www/html/images
|
||||
# After initial setup, download LocalSettings.php
|
||||
# populate the following line and
|
||||
# use compose to restart the mediawiki service.
|
||||
# populate the following line and restart the mediawiki service.
|
||||
# Needs read UID or GID 33 (www-data).
|
||||
# - ./configs/LocalSettings.php:/var/www/html/LocalSettings.php:ro
|
||||
#- ./configs/LocalSettings.php:/var/www/html/LocalSettings.php:ro
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
|
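After the web installer has produced a LocalSettings.php, the hand-over described in the comments above is just a copy plus a service restart. A minimal sketch, assuming the composition's base folder is the current directory and the file was downloaded to ~/Downloads:

```bash
# Put the generated LocalSettings.php where the commented volume mount
# expects it; the webserver user (UID/GID 33) only needs read access.
cp ~/Downloads/LocalSettings.php ./configs/LocalSettings.php
chmod 644 ./configs/LocalSettings.php

# Uncomment the LocalSettings.php volume line in docker-compose.yml,
# then recreate the mediawiki service so the bind mount takes effect.
docker compose up -d mediawiki
```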
@ -1,28 +1,31 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to backup the docker-composed Mediawiki instance.
|
||||
# Dumps the MySQL/MariaDB database to the $BASE_DIR/storage/backups/dumps
|
||||
# folder (by default). An optional parameter may change the target folder.
|
||||
# A service script to backup the docker-composed MySQL/MariaDB database.
|
||||
# Dumps database to the $BASE_DIR/storage/backups/dumps folder (by default).
|
||||
# An optional parameter may change the target folder.
|
||||
#
|
||||
# This script gets the database credentials from MW's LocalSettings.php
|
||||
# This script gets the database credentials from the docker-compose.yml file
|
||||
# and calls the mysql_dumpdb worker script which should be installed in
|
||||
# the same folder or somewhere in the path.
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
# Uses the mysql_dumpdb utility which must be available on path.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-02-26 v0.3
|
||||
# mod: isn't tied to a particular composition (MediaWiki, WordPress, etc.).
|
||||
# 2024-12-01 v0.2.1
|
||||
# fix: typo in docker-compose version detection.
|
||||
# 2024-08-24 v0.2
|
||||
# 2024-08-25 v0.2
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# 2021-08-27 v0.1 Initial version.
|
||||
# 2021-10-19 v0.1 Initial version.
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_DUMPDIR=${PAR_DUMPDIR:-""} # Folder to dump within
|
||||
PAR_SERVICE=${PAR_SERVICE:-"database"} # Service's name in composition
|
||||
|
||||
# Messages (may be overridden by configuration).
|
||||
#
|
||||
@ -33,11 +36,11 @@ MSG_MISSINGCONF="Fatal: missing config file"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
MSG_NOLOCATE="Cannot locate the database container."
|
||||
MSG_NOPARAM="Missing PHP parameter"
|
||||
MSG_NOPARAM="Missing environment parameter"
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
CONFFILE="configs/LocalSettings.php" # MW's configuration file
|
||||
CONFFILE="docker-compose.yml" # Configuration file
|
||||
DUMPDIR="storage/backups/dumps" # Folder to dump within
|
||||
USER=${USER:-$LOGNAME} # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
@ -112,12 +115,12 @@ DUMPDIR="${PAR_DUMPDIR:-$BASE_DIR/$DUMPDIR}"
|
||||
[[ ! -w "$DUMPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $DUMPDIR" >&2 && exit 1
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
# The composition must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Searches and parses the MW's LocalSettings.php file.
|
||||
# Searches and parses the config file.
|
||||
#
|
||||
if [ ! -r "$CONFFILE" ]; then
|
||||
echo "$MSG_MISSINGCONF $CONFFILE" >&2; exit 1
|
||||
@ -126,31 +129,29 @@ fi
|
||||
function parse { [[ -z "$1" ]] && return
|
||||
# Gets the live lines containing the parameter.
|
||||
value=$("$CAT" "$CONFFILE" | "$GREP" -ve '^#' | \
|
||||
"$GREP" -e "^$1" | "$TR" -d '\r')
|
||||
"$GREP" -e "$1" | "$TR" -d '\r')
|
||||
# If there are multiple matches, the last one is considered.
|
||||
value=$(echo -e "$value" | "$TAIL" -n1)
|
||||
# Right side of the equal sign W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d'=' -f2 | "$XARGS")
|
||||
# Right side of the colon W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d':' -f2 | "$XARGS")
|
||||
# Removes the trailing semicolon (if any).
|
||||
value=${value%;*}
|
||||
echo -e "$value"; return
|
||||
}
|
||||
# Gives up here silently if the type of the database isn't MySQL.
|
||||
[[ "$(parse "\$wgDBtype")" != 'mysql' ]] && exit 1
|
||||
# All parameters are mandatory.
|
||||
MYCONTAINER="$(parse "\$wgDBserver")"
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM \$wgDBserver" >&2; exit 1; fi
|
||||
MYDATABASE="$(parse "\$wgDBname")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM \$wgDBname" >&2; exit 1; fi
|
||||
MYUSER="$(parse "\$wgDBuser")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM \$wgDBuser" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "\$wgDBpassword")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM \$wgDBpassword" >&2; exit 1; fi
|
||||
MYCONTAINER="$PAR_SERVICE" # TODO: guess from the yml
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM PAR_SERVICE" >&2; exit 1; fi1; fi
|
||||
MYDATABASE="$(parse "MYSQL_DATABASE")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM MYSQL_DATABASE" >&2; exit 1; fi
|
||||
MYUSER="$(parse "MYSQL_USER")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM MYSQL_USER" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "MYSQL_PASSWORD")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM MYSQL_PASSWORD" >&2; exit 1; fi
|
||||
# We've the configuration parsed.
|
||||
|
||||
# Converts the database service name to an actual running container's name.
|
||||
#
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandtring ps -q "$MYCONTAINER") | "$CUT" -c2-)"
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$MYCONTAINER") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
|
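The rewritten parse helper reads its values straight from the docker-compose.yml instead of LocalSettings.php: it keeps the last non-comment line naming the property, takes the right-hand side of the colon and trims spaces and quotes. A standalone sketch of that pipeline (the file name and sample property value are illustrative):

```bash
#!/bin/bash
# Rough standalone equivalent of the parse() function used above.
YMLFILE="docker-compose.yml"

parse() {
  [[ -z "$1" ]] && return
  local line value
  # Last non-comment line mentioning the property, e.g. "  MYSQL_DATABASE: wikidb"
  line=$(grep -ve '^#' "$YMLFILE" | grep -e "$1" | tr -d '\r' | tail -n1)
  # Right side of the colon without leading/trailing spaces and quotes.
  value=$(echo -n "$line" | cut -d':' -f2 | xargs)
  echo "$value"
}

parse "MYSQL_DATABASE"    # would print e.g. "wikidb"
```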
@ -11,6 +11,10 @@
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-05-21 v0.3.1
|
||||
# fix: Wrong variable name (BASE_DIR instead of SERVICE_BASE) in a check.
|
||||
# 2025-05-20 v0.3
|
||||
# fix: Wrong output redirection order (>/dev/null 2>&1 was reversed).
|
||||
# 2024-08-25 v0.2
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# mod: Doesn't back up if the service is down.
|
||||
@ -72,7 +76,7 @@ while [ -h "$SOURCE" ]; do
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
@ -132,7 +136,7 @@ fi
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
[[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Attempts the backup commit.
|
||||
|
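Both backup scripts are meant to be run unattended by a member of the docker group, as their headers note. A sketch of a matching crontab entry; the installation path and the script name are placeholders for wherever the composition and its tools folder actually live:

```bash
# crontab of a Docker manager user (member of the docker group):
# dump the database every night at 02:30, discarding normal output.
30 2 * * *  /srv/docker/mediawiki_mariadb/tools/backup.d/backup.sh >/dev/null 2>&1
```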
.recipes/mediawiki_mariadb/tools/restoredb_mysql.sh (new file, 195 lines)
@ -0,0 +1,195 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Restores a composed MySQL/MariaDB database from a dump file.
|
||||
# Gets all necessary data from the docker-compose.yml file.
|
||||
#
|
||||
# This is a wrapper script to the system-wide mysql_restoredb tool.
|
||||
# Database recovery with the necessary user management and grants
|
||||
# requires superuser privileges in MySQL, but simple data recovery
|
||||
# is possible if the user and privileges are already set.
|
||||
#
|
||||
# You have to call this script as a Docker manager user (member of the
|
||||
# 'docker' Linux group). The worker tool must be available somewhere
|
||||
# in PATH. At least 5.7.6 MySQL or at least 10.1.3 MariaDB is required.
|
||||
#
|
||||
# Usage:
|
||||
# $0 path_to_the_dumpfile [ path_to_the_service's_base ]
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
#
|
||||
# 2025-02-26 v0.1 Forked from the Smartfront repository and rewritten.
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_SERVICE=${SERVICE:-"database"} # Database container's name
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
BACKUPFOLDER="storage/backups/dumps" # Skeleton's default dump folder
|
||||
PROP_DBAPASS="MYSQL_ROOT_PASSWORD" # DB admin password property
|
||||
PROP_DBNAME="MYSQL_DATABASE" # DB name property
|
||||
PROP_DBPASS="MYSQL_PASSWORD" # DB password property
|
||||
PROP_DBUSER="MYSQL_USER" # DB username property
|
||||
USER=${USER:-$LOGNAME} # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Basic environment settings.
|
||||
#
|
||||
LANG=C
|
||||
LC_ALL=C
|
||||
|
||||
# Messages.
|
||||
#
|
||||
MSG_BADDUMP="Fatal: doesn't exist or doesn't a dumpfile:"
|
||||
MSG_DOCKERGRPNEED="You must be a member of the docker group."
|
||||
MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGCONF="Fatal: missing config file"
|
||||
MSG_MISSINGYML="Fatal: didn't find the $YMLFILE file"
|
||||
MSG_NOLOCATE="Cannot locate the database container."
|
||||
MSG_NOPARAM="Missing environment parameter"
|
||||
|
||||
MSG_USAGE="Usage: $0 dump_pathname [ composition_base_pathname ]\n"
|
||||
MSG_USAGE+="ENVVAR:\n"
|
||||
MSG_USAGE+="SERVICE \tDatabase service's name in composition\n"
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname docker \
|
||||
grep id mysql_restoredb readlink tail xargs
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
else
|
||||
# Otherwise falling back to v1.
|
||||
DOCKER_COMPOSE="$(which docker-compose)"
|
||||
commandstring=""
|
||||
fi
|
||||
# One of the two is mandatory.
|
||||
if [ -z "$DOCKER_COMPOSE" ];then echo "$MSG_MISSINGDEP docker-compose" >&2; exit 1; fi
|
||||
# Below docker-compose should be called as "$DOCKER_COMPOSE" $commandstring sequence.
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Need to be root or a Docker manager user.
|
||||
#
|
||||
[[ "$USER" != 'root' ]] \
|
||||
&& [[ -z "$(echo "$("$ID" -Gn "$USER") " | "$GREP" ' docker ')" ]] \
|
||||
&& echo "$MSG_DOCKERGRPNEED" >&2 && exit 1 #"
|
||||
|
||||
# Gets the command line parameters.
|
||||
#
|
||||
# DUMPFILE is mandatory
|
||||
if [ -n "$1" ]; then DUMPFILE="$1"; shift
|
||||
else echo -e "$MSG_USAGE" >&2; exit 1; fi
|
||||
# SERVICE_BASE is optional
|
||||
if [ -n "$1" ]; then SERVICE_BASE="$1"; shift; fi
|
||||
# We've read the unchecked command line parameters.
|
||||
|
||||
# Searches the base folder, containing the YMLFILE.
|
||||
#
|
||||
if [ -z "$SERVICE_BASE" ]; then
|
||||
# Called from the base folder (./)?
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
fi
|
||||
# On failure gives it up here.
|
||||
if [ -z "$SERVICE_BASE" -o ! -r "$SERVICE_BASE/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute path.
|
||||
YMLFILE="$SERVICE_BASE/$YMLFILE"
|
||||
# We've the YMLFILE.
|
||||
|
||||
# Finds the DUMPFILE to use.
|
||||
#
|
||||
# The DUMPFILE must point to a readable file.
|
||||
# If it doesn't, the skeleton's standard backup folder is tried as well.
|
||||
if [ ! -r "$DUMPFILE" ]
|
||||
then DUMPFILE="$("$DIRNAME" "$SERVICE_BASE")/$BACKUPFOLDER/$DUMPFILE"; fi
|
||||
# If it is an existing symlink, it is dereferenced to ensure it points to a file.
|
||||
if [ -h "$DUMPFILE" ]; then
|
||||
if [[ "$("$READLINK" "$DUMPFILE")" != /* ]]
|
||||
# relative path in symlink
|
||||
then DUMPFILE="$("$DIRNAME" "$DUMPFILE")/$("$READLINK" "$DUMPFILE")"
|
||||
# absolute path in symlink
|
||||
else DUMPFILE="$("$READLINK" "$DUMPFILE")"; fi
|
||||
fi
|
||||
# Let's check it!
|
||||
if [ ! -r "$DUMPFILE" -o ! -f "$DUMPFILE" ]
|
||||
then echo -e "$MSG_BADDUMP $DUMPFILE"; exit 1; fi
|
||||
# We've an existing dumpfile.
|
||||
|
||||
# The composition must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Parses the YMLFILE for parameters to use.
|
||||
#
|
||||
function parse { [[ -z "$1" ]] && return
|
||||
# Gets the live lines containing the parameter.
|
||||
value=$("$CAT" "$YMLFILE" | "$GREP" -ve '^#' | \
|
||||
"$GREP" -e "^ *$1" | "$TR" -d '\r')
|
||||
# If there are multiple matches, the last one is considered.
|
||||
value=$(echo -e "$value" | "$TAIL" -n1)
|
||||
# Right side of the colon W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d':' -f2 | "$XARGS")
|
||||
# Removes the trailing semicolon (if any).
|
||||
value=${value%;*}
|
||||
echo -e "$value"; return
|
||||
}
|
||||
# These parameters are mandatory.
|
||||
MYCONTAINER="$PAR_SERVICE" # TODO: guess from the yml
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM PAR_SERVICE" >&2; exit 1; fi1; fi
|
||||
MYDATABASE="$(parse "$PROP_DBNAME")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM $PROP_DBNAME" >&2; exit 1; fi
|
||||
MYUSER="$(parse "$PROP_DBUSER")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM $PROP_DBUSER" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "$PROP_DBPASS")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM $PROP_DBPASS" >&2; exit 1; fi
|
||||
# These are optional.
|
||||
MYDBAUSER="root"
|
||||
MYDBAPASSWORD="$(parse "$PROP_DBAPASS")"
|
||||
# We've the configuration parsed.
|
||||
|
||||
# Converts the database service name to an actual running container's name.
|
||||
#
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps -q "$MYCONTAINER") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Calls the worker script to make the job.
|
||||
#
|
||||
export MYDBAUSER MYDBAPASSWORD MYPASSWORD
|
||||
"$MYSQL_RESTOREDB" -C "$MYCONTAINER" -U "$MYUSER" "$MYDATABASE" "$DUMPFILE"
|
||||
|
||||
# That's all, Folks! :)
|
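A typical invocation of the new restore wrapper could look like the sketch below; the dump file name and base path are placeholders, and SERVICE only needs to be set when the database service isn't called "database":

```bash
# Run as a member of the docker group, from anywhere; the second
# argument (the composition's base folder) is optional.
cd /srv/docker/mediawiki_mariadb        # placeholder path
SERVICE=database \
  ./tools/restoredb_mysql.sh storage/backups/dumps/example-dump.sql
```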
.recipes/nodejs_mongodb_mongoxp/.gitignore (new file, vendored, 7 lines)
@ -0,0 +1,7 @@
|
||||
# Ignore everything else in this directory.
|
||||
*
|
||||
!storage
|
||||
!tools
|
||||
!.gitignore
|
||||
!README.md
|
||||
!docker-compose.yml
|
.recipes/nodejs_mongodb_mongoxp/README.md (new empty file)
.recipes/nodejs_mongodb_mongoxp/docker-compose.yml (new file, 113 lines)
@ -0,0 +1,113 @@
|
||||
# Node.js with MongoDB and Mongo-Express tool.
|
||||
#
|
||||
# Provides a JavaScript runtime environment with MongoDB backend.
|
||||
# Assumes a suitable JS application in nodejs-apps volume.
|
||||
#
|
||||
services:
|
||||
#
|
||||
# https://hub.docker.com/_/node
|
||||
# https://github.com/nodejs/docker-node
|
||||
#
|
||||
nodejs:
|
||||
image: node:latest
|
||||
restart: unless-stopped
|
||||
# Choose a suitable Linux account here.
|
||||
# Must have R/W access to the nodejs-apps volume.
|
||||
user: "1001"
|
||||
# The application defines the port(s) to expose.
|
||||
# Watch out for a possible public port collision.
|
||||
ports:
|
||||
- 8201:8080
|
||||
links:
|
||||
- mongodb
|
||||
volumes:
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- ./storage/volumes/nodejs-apps:/home/node/app
|
||||
environment:
|
||||
TZ: Europe/Budapest
|
||||
NODE_ENV: production
|
||||
NPM_CONFIG_CACHE: /home/node/app/node_modules/.cache
|
||||
NPM_CONFIG_LOGLEVEL: info
|
||||
#
|
||||
# Environment variables to control the docker-skeleton's
|
||||
# external backup. The Node.JS image doesn't interpret them.
|
||||
# You may specify the relevant folders for the backup utility.
|
||||
# By default it backs up the entire nodejs-apps folder.
|
||||
DS_BACKUP_FOLDERS: ''
|
||||
# These folders below will be excluded from the backup.
|
||||
DS_BACKUP_EXCLUDES: ''
|
||||
# You may specify the relevant MongoDB database(s) as well.
|
||||
DS_BACKUP_DATABASES: ''
|
||||
#
|
||||
# Starting the application via npm and package.json:
|
||||
#command: sh -c "cd /home/node/app && npm install && npm start"
|
||||
# Starting a single file application (testing only):
|
||||
command: node /home/node/app/helloworld.js
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
||||
#
|
||||
# https://hub.docker.com/_/mongo
|
||||
# https://github.com/docker-library/mongo
|
||||
#
|
||||
mongodb:
|
||||
image: mongo:latest
|
||||
restart: unless-stopped
|
||||
# Choose a suitable Linux account here.
|
||||
# Must have R/W access to the mongodb-data volume.
|
||||
user: "1001"
|
||||
volumes:
|
||||
- ./storage/volumes/mongodb-data:/data/db
|
||||
environment:
|
||||
MONGO_INITDB_DATABASE: admin
|
||||
# Sets the DBA (root) credentials below.
|
||||
MONGO_INITDB_ROOT_USERNAME: admin
|
||||
# It is highly recommended to change this to a strong random password.
|
||||
# https://passwordsgenerator.net/
|
||||
MONGO_INITDB_ROOT_PASSWORD: secret-1
|
||||
# Sets the DBA (root) credentials below.
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
||||
#
|
||||
# https://hub.docker.com/_/mongo-express
|
||||
# https://github.com/mongo-express/mongo-express
|
||||
# https://github.com/mongo-express/mongo-express-docker
|
||||
#
|
||||
mongoxp:
|
||||
image: mongo-express
|
||||
restart: unless-stopped
|
||||
# Watch out for a possible public port collision.
|
||||
ports:
|
||||
- 8202:8081
|
||||
links:
|
||||
- mongodb
|
||||
environment:
|
||||
# Override the default value set in the docker-entrypoint.sh:
|
||||
# https://github.com/mongo-express/mongo-express-docker/issues/21
|
||||
ME_CONFIG_MONGODB_URL: fake,fake
|
||||
ME_CONFIG_MONGODB_SERVER: mongodb
|
||||
ME_CONFIG_MONGODB_PORT: 27017
|
||||
ME_CONFIG_SITE_BASEURL: /mongoxp/
|
||||
# We don't use SSL behind a local reverse proxy.
|
||||
ME_CONFIG_SITE_SSL_ENABLED: false
|
||||
ME_CONFIG_SITE_SSL_CRT_PATH: ''
|
||||
ME_CONFIG_SITE_SSL_KEY_PATH: ''
|
||||
# We use the root account here.
|
||||
ME_CONFIG_MONGODB_ENABLE_ADMIN: true
|
||||
# Must match MONGO_INITDB_ROOT_* credentials.
|
||||
ME_CONFIG_MONGODB_ADMINUSERNAME: admin
|
||||
ME_CONFIG_MONGODB_ADMINPASSWORD: secret-1
|
||||
# It is recommended to use at least a basic authentication.
|
||||
ME_CONFIG_BASICAUTH: true
|
||||
ME_CONFIG_BASICAUTH_USERNAME: admin
|
||||
# It is highly recommended to change this to a strong random password.
|
||||
# https://passwordsgenerator.net/
|
||||
ME_CONFIG_BASICAUTH_PASSWORD: secret-2
|
||||
ME_CONFIG_OPTIONS_EDITORTHEME: ambiance
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
.recipes/nodejs_mongodb_mongoxp/storage/.gitignore (new file, vendored, 4 lines)
@ -0,0 +1,4 @@
|
||||
# Ignore everything in this directory except these folders.
|
||||
*
|
||||
!.gitignore
|
||||
!volumes
|
.recipes/nodejs_mongodb_mongoxp/storage/volumes/.gitignore (new file, vendored, 5 lines)
@ -0,0 +1,5 @@
|
||||
# Ignore everything in this directory except these folders.
|
||||
*
|
||||
!.gitignore
|
||||
!nodejs-apps
|
||||
!mongodb-data
|
.recipes/nodejs_mongodb_mongoxp/storage/volumes/mongodb-data/.gitignore (new file, vendored, 3 lines)
@ -0,0 +1,3 @@
|
||||
# Ignore everything in this directory except these folders.
|
||||
*
|
||||
!.gitignore
|
.recipes/nodejs_mongodb_mongoxp/storage/volumes/nodejs-apps/.gitignore (new file, vendored, 4 lines)
@ -0,0 +1,4 @@
|
||||
# Ignore everything in this directory except these folders.
|
||||
*
|
||||
!.gitignore
|
||||
!helloworld.js
|
@ -0,0 +1,11 @@
|
||||
/*
|
||||
A humble test web application.
|
||||
https://www.w3schools.com/nodejs/nodejs_get_started.asp
|
||||
*/
|
||||
|
||||
var http = require('http');
|
||||
|
||||
http.createServer(function (req, res) {
|
||||
res.writeHead(200, {'Content-Type': 'text/html'});
|
||||
res.end('Hello World!');
|
||||
}).listen(8080);
|
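Once the composition is up, the single-file test application can be checked from the host. A quick sketch, assuming the 8201:8080 port mapping from the docker-compose.yml above:

```bash
docker compose up -d nodejs
# helloworld.js listens on 8080 inside the container, published as 8201.
curl http://localhost:8201/
# Expected output: Hello World!
```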
.recipes/nodejs_mongodb_mongoxp/tools/.gitignore (new file, vendored, 4 lines)
@ -0,0 +1,4 @@
|
||||
# Ignore everything else in this directory.
|
||||
*
|
||||
!*.d
|
||||
!.gitignore
|
.recipes/nodejs_mongodb_mongoxp/tools/backup.d/mongodb_dump.sh (new file, 169 lines)
@ -0,0 +1,169 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to dump the relevant MongoDB database(s)
|
||||
# of a docker-composed MongoDB instance. Creates a tarball in
|
||||
# $BASE_DIR/storage/backups/tarballs folder (by default).
|
||||
# The relevant databases must be specified within the
|
||||
# docker-compose.yml in a BACKUP_DATABASES environment variable.
|
||||
# An optional parameter may change the target folder.
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-03-05 v0.1.1
|
||||
# mod: minimally rewrote the description.
|
||||
# 2024-09-23 v0.1 Initial version.
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_BACKUPDIR=${PAR_BACKUPDIR:-""} # Folder to dump within
|
||||
|
||||
# Messages (may be overridden by configuration).
|
||||
#
|
||||
MSG_DOCKERGRPNEED="You must be a member of the docker group."
|
||||
MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
MSG_NOLOCATE="Cannot locate the MongoDB container."
|
||||
MSG_NOPARAM="Missing environment parameter"
|
||||
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
BACKUPDIR="storage/backups/tarballs" # Folder to dump within
|
||||
PAR_DATABASES="DS_BACKUP_DATABASES" # List of DB(s) in YMLFILE
|
||||
SERVICENAME="mongodb" # The composed MongoDB service
|
||||
USER=${USER:-$LOGNAME} # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml" # Gets the parameters from here
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname docker \
|
||||
grep gzip hostname id pwd tail xargs
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
else
|
||||
# Otherwise falling back to v1.
|
||||
DOCKER_COMPOSE="$(which docker-compose)"
|
||||
commandstring=""
|
||||
fi
|
||||
# One of the two is mandatory.
|
||||
if [ -z "$DOCKER_COMPOSE" ];then echo "$MSG_MISSINGDEP docker-compose" >&2; exit 1; fi
|
||||
# Below docker-compose should be called as "$DOCKER_COMPOSE" $commandstring sequence.
|
||||
#
|
||||
# An additional bugfix (use "$(which gzip)" instead of "$GZIP"):
|
||||
# https://www.gnu.org/software/gzip/manual/html_node/Environment.html
|
||||
GZIP=""
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Need to be root or a Docker manager user.
|
||||
#
|
||||
[[ "$USER" != 'root' ]] \
|
||||
&& [[ -z "$(echo "$("$ID" -Gn "$USER") " | "$GREP" ' docker ')" ]] \
|
||||
&& echo "$MSG_DOCKERGRPNEED" >&2 && exit 1 #"
|
||||
|
||||
# Searches the base folder, containing a docker-compose.yml file.
|
||||
#
|
||||
# Called from the base folder (./)?
|
||||
BASE_DIR="$PAR_BASEDIR"
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# On failure gives it up here.
|
||||
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute paths.
|
||||
BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
|
||||
|
||||
# The dump target folder must be writable.
|
||||
#
|
||||
[[ ! -w "$BACKUPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $BACKUPDIR" >&2 && exit 1
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Converts the MongoDB service name to an actual running container's name.
|
||||
#
|
||||
MDBCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$SERVICENAME") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$MDBCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Checks and parses the config file for database names to dump
|
||||
# and DBA (root) credentials for MongoDB.
|
||||
#
|
||||
function parse { [[ -z "$1" ]] && return
|
||||
# Gets the live lines containing the parameter.
|
||||
value=$("$CAT" "$CONFFILE" 2>/dev/null | "$GREP" -ve '^#' | \
|
||||
"$GREP" -e " $1:" | "$TR" -d '\r')
|
||||
# If there are multiple matches, the last one is considered.
|
||||
value=$(echo -e "$value" | "$TAIL" -n1)
|
||||
# Right side of the colon W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d':' -f2 | "$XARGS")
|
||||
echo -e "$value"; return
|
||||
}
|
||||
# Examines the YMLFILE.
|
||||
CONFFILE="$BASE_DIR/$YMLFILE"
|
||||
# Gets the list of the databases to dump. Silently exits if it is empty.
|
||||
DATABASES="$(parse "$PAR_DATABASES")"
|
||||
if [ -z "$DATABASES" ]; then exit; fi
|
||||
# All parameters below are mandatory.
|
||||
DBAUTH="$(parse "MONGO_INITDB_DATABASE")"
|
||||
if [ -z "$DBAUTH" ]; then echo "$MSG_NOPARAM MONGO_INITDB_DATABASE" >&2; exit 1; fi
|
||||
DBUSER="$(parse "MONGO_INITDB_ROOT_USERNAME")"
|
||||
if [ -z "$DBAUTH" ]; then echo "$MSG_NOPARAM MONGO_INITDB_ROOT_USERNAME" >&2; exit 1; fi
|
||||
DBPASS="$(parse "MONGO_INITDB_ROOT_PASSWORD")"
|
||||
if [ -z "$DBAUTH" ]; then echo "$MSG_NOPARAM MONGO_INITDB_ROOT_PASSWORD" >&2; exit 1; fi
|
||||
# We've the configuration parsed.
|
||||
|
||||
# Attempts the dump(s) using the mongodump utility existing within the container.
|
||||
# Uses the DBA (root) credentials parsed from the YMLFILE above.
|
||||
#
|
||||
if [ -w "$BACKUPDIR" ]; then
|
||||
# Enumerates the relevant databases (if any).
|
||||
for DBNAME in $DATABASES ''
|
||||
do
|
||||
# Dumps the actual database as a DBA.
|
||||
if [ -n "$DBNAME" ]; then
|
||||
BACKUP_NAME=$SERVICENAME-$DBNAME.$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")
|
||||
"$DOCKER" exec $MDBCONTAINER sh -c "exec mongodump -u $DBUSER -p $DBPASS --authenticationDatabase $DBAUTH -d $DBNAME --quiet --archive" | \
|
||||
"$(which gzip)" > "$BACKUPDIR/$BACKUP_NAME.archive.gz" 2>>"$BACKUPDIR/$BACKUP_NAME.log"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# That's all, Folks! :)
|
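The dumps written above are gzipped mongodump archives, so they can be played back with mongorestore inside the same container. A minimal sketch, with the container and file names as placeholders and the DBA credentials taken from the compose file:

```bash
# Restore one database from an archive created by mongodb_dump.sh.
gunzip -c storage/backups/tarballs/mongodb-mydb.20250101_023000.myhost.archive.gz | \
  docker exec -i <mongodb-container> \
    mongorestore -u admin -p secret-1 --authenticationDatabase admin \
                 --archive --drop --quiet
```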
.recipes/nodejs_mongodb_mongoxp/tools/backup.d/storage_backup.sh (new file, 152 lines)
@ -0,0 +1,152 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to backup the application's storage (with exceptions)
|
||||
# of a docker-composed Node.JS instance. Creates a tarball in
|
||||
# $BASE_DIR/storage/backups/tarballs folder (by default). An optional
|
||||
# parameter may change the target folder.
|
||||
#
|
||||
# The contents of the tarball can be refined by setting the DS_BACKUP_*
|
||||
# environment variables in the docker-compose.yml file.
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2024-09-23 v0.1 Initial version.
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_BACKUPDIR=${PAR_BACKUPDIR:-""} # Folder to dump within
|
||||
|
||||
# Messages (may be overridden by configuration).
|
||||
#
|
||||
MSG_DOCKERGRPNEED="You must be a member of the docker group."
|
||||
MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
MSG_NOLOCATE="Cannot locate the Node.JS container."
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
APPSDIR="/home/node/app" # Base folder of storage to dump
|
||||
BACKUPDIR="storage/backups/tarballs" # Folder to dump within
|
||||
SERVICENAME="nodejs" # The composed Node.JS service
|
||||
USER=${USER:-$LOGNAME} # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname docker \
|
||||
find grep hostname id pwd tail xargs
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
else
|
||||
# Otherwise falling back to v1.
|
||||
DOCKER_COMPOSE="$(which docker-compose)"
|
||||
commandstring=""
|
||||
fi
|
||||
# One of the two is mandatory.
|
||||
if [ -z "$DOCKER_COMPOSE" ];then echo "$MSG_MISSINGDEP docker-compose" >&2; exit 1; fi
|
||||
# Below docker-compose should be called as "$DOCKER_COMPOSE" $commandstring sequence.
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Need to be root or a Docker manager user.
|
||||
#
|
||||
[[ "$USER" != 'root' ]] \
|
||||
&& [[ -z "$(echo "$("$ID" -Gn "$USER") " | "$GREP" ' docker ')" ]] \
|
||||
&& echo "$MSG_DOCKERGRPNEED" >&2 && exit 1 #"
|
||||
|
||||
# Searches the base folder, containing a docker-compose.yml file.
|
||||
#
|
||||
# Called from the base folder (./)?
|
||||
BASE_DIR="$PAR_BASEDIR"
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# On failure gives it up here.
|
||||
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute paths.
|
||||
BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
|
||||
|
||||
# The dump target folder must be writable.
|
||||
#
|
||||
[[ ! -w "$BACKUPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $BACKUPDIR" >&2 && exit 1
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Converts the Node.JS service name to an actual running container's name.
|
||||
#
|
||||
NDCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$SERVICENAME") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$NDCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Checks and parses the config file for the folder (path)names
|
||||
# to dump and to exclude.
|
||||
#
|
||||
function parse { [[ -z "$1" ]] && return
|
||||
# Gets the live lines containing the parameter.
|
||||
value=$("$CAT" "$CONFFILE" 2>/dev/null | "$GREP" -ve '^#' | \
|
||||
"$GREP" -e " $1:" | "$TR" -d '\r')
|
||||
# If multiple the last one to consider.
|
||||
value=$(echo -e "$value" | "$TAIL" -n1)
|
||||
# Right side of the colon W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d':' -f2 | "$XARGS")
|
||||
echo -e "$value"; return
|
||||
}
|
||||
# Examines the YMLFILE.
|
||||
CONFFILE="$BASE_DIR/$YMLFILE"
|
||||
# Gets the list of folders to dump and makes the path relative (some sanitization).
|
||||
# Sets the app's root if no folders given.
|
||||
FOLDERS=""
|
||||
for folder in $(parse "DS_BACKUP_FOLDERS") ''; do [[ -n "$folder" ]] && FOLDERS+=" ./$folder"; done
|
||||
[[ -z "$FOLDERS" ]] && FOLDERS="."
|
||||
# Gets the list of excludes as well. Converts them to tar parameters.
|
||||
EXCLUDES=""
|
||||
for exclude in $(parse "DS_BACKUP_EXCLUDES") ''; do [[ -n "$exclude" ]] && EXCLUDES+="--exclude='./$exclude' "; done
|
||||
# We've folders and excludes prepared.
|
||||
|
||||
# Tries the FS backup.
|
||||
if [ -w "$BACKUPDIR" ]; then
|
||||
BACKUP_NAME="storage.$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")"
|
||||
"$DOCKER" exec $NDCONTAINER sh \
|
||||
-c "cd $APPSDIR; tar $EXCLUDES -cz $FOLDERS" \
|
||||
> "$BACKUPDIR/$BACKUP_NAME.tgz" 2>>"$BACKUPDIR/$BACKUP_NAME.log"
|
||||
fi
|
||||
|
||||
# That's all, Folks! :)
|
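The tarball is created with paths relative to /home/node/app, so it can be inspected or unpacked straight back into the bind-mounted application folder. A sketch with placeholder file names:

```bash
# List the contents of a backup produced by storage_backup.sh.
tar tzf storage/backups/tarballs/storage.20250101_023000.myhost.tgz

# Restore it into the nodejs-apps volume (paths were stored as "./...").
tar xzf storage/backups/tarballs/storage.20250101_023000.myhost.tgz \
    -C storage/volumes/nodejs-apps
```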
@ -0,0 +1,33 @@
|
||||
# Includable nginx configuration.
|
||||
#
|
||||
# Additional Filebrowser service.
|
||||
# Take a look at the proxy port setting.
|
||||
location ~ /(api|files|login|static) {
|
||||
proxy_pass http://localhost:$PAR_PROXYPORT;
|
||||
error_page 500 502 503 504 @proxy_error;
|
||||
|
||||
client_max_body_size 1G;
|
||||
keepalive_timeout 30;
|
||||
proxy_read_timeout 300;
|
||||
|
||||
proxy_request_buffering on;
|
||||
proxy_buffers 2048 16k;
|
||||
proxy_buffer_size 16k;
|
||||
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-Server $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# This hack reverts a modification in Filebrowser (2.33.1):
|
||||
# https://github.com/filebrowser/filebrowser/commit/8a14018861fe581672bbd27cdc3ae5691f70a108
|
||||
# We don't force PDFs to be downloaded.
|
||||
more_clear_headers -t 'application/pdf' 'Content-Disposition';
|
||||
|
||||
#websockets
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
}
|
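nginx itself doesn't expand $PAR_PROXYPORT, so the snippet is presumably rendered by the surrounding skeleton before it gets included. A hedged sketch of doing the same by hand, using the 8201 port published by the Filebrowser service and placeholder file names (note that more_clear_headers needs the headers-more module):

```bash
# Substitute the proxy port and install the rendered include.
sed 's/\$PAR_PROXYPORT/8201/' filebrowser-location.conf.in \
    > /etc/nginx/snippets/filebrowser-location.conf
nginx -t && systemctl reload nginx
```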
.recipes/staticweb_filebrowser/config/filebrowser-config (new symbolic link)
@ -0,0 +1 @@
|
||||
../storage/volumes/filebrowser_data
|
.recipes/staticweb_filebrowser/docker-compose.yml (new file, 32 lines)
@ -0,0 +1,32 @@
|
||||
# Static website with Filebrowser as an admin tool.
|
||||
#
|
||||
services:
|
||||
# https://github.com/filebrowser/filebrowser
|
||||
# https://hub.docker.com/r/filebrowser/filebrowser
|
||||
filebrowser:
|
||||
image: filebrowser/filebrowser:latest
|
||||
restart: unless-stopped
|
||||
# Watch out for a possible public port collision.
|
||||
ports:
|
||||
- 8201:80
|
||||
# The same Linux user running the reverse proxy webserver.
|
||||
user: 33:1001
|
||||
environment:
|
||||
TZ: Europe/Budapest
|
||||
# Default credentials: admin/admin
|
||||
# Note, FB_NOAUTH only matters if the database is still empty.
|
||||
#FB_NOAUTH: true
|
||||
FB_BASEURL: "/"
|
||||
volumes:
|
||||
# The Linux user defined above must have R/W access here.
|
||||
- ./storage/volumes/staticweb:/srv
|
||||
# Proper mounts before 2.33.0 version (2025-06-18).
|
||||
#- ./storage/volumes/filebrowser_data/database.db:/.database.db
|
||||
#- ./storage/volumes/filebrowser_data/filebrowser.json:/.filebrowser.json
|
||||
# Proper mounts since 2.33.0 version (2025-06-18).
|
||||
- ./storage/volumes/filebrowser_data/database.db:/database/filebrowser.db
|
||||
- ./storage/volumes/filebrowser_data/filebrowser-new.json:/config/settings.json
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
@ -0,0 +1,8 @@
|
||||
{
|
||||
"port": 80,
|
||||
"baseURL": "/",
|
||||
"address": "",
|
||||
"log": "stdout",
|
||||
"database": "/database/filebrowser.db",
|
||||
"root": "/srv"
|
||||
}
|
@ -0,0 +1,8 @@
|
||||
{
|
||||
"port": 80,
|
||||
"baseURL": "/",
|
||||
"address": "",
|
||||
"log": "stdout",
|
||||
"database": "/.database.db",
|
||||
"root": "/srv"
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test page</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>It works!</h1>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,93 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to backup the web storage of a static website.
|
||||
# Creates a tarball in $BASE_DIR/storage/backups/tarballs folder
|
||||
# (by default). Optional parameters may change the source and/or
|
||||
# target folder.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-01-22 Initial version.
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_BACKUPDIR=${PAR_BACKUPDIR:-""} # Folder to dump within
|
||||
PAR_SOURCEDIR=${PAR_SOURCEDIR:-""} # Folder to save
|
||||
|
||||
# Messages (may be overridden by configuration).
|
||||
#
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_NONREAD="The source directory isn't readable"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
BACKUPDIR="storage/backups/tarballs" # Folder to dump within
|
||||
SOURCEDIR="storage/volumes/staticweb" # Folder to backup
|
||||
USER=${USER:-$LOGNAME} # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname hostname pwd readlink tar
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Searches the base folder, containing a docker-compose.yml file.
|
||||
#
|
||||
# Called from the base folder (./)?
|
||||
BASE_DIR="$PAR_BASEDIR"
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# On failure gives it up here.
|
||||
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute paths.
|
||||
BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
|
||||
SOURCEDIR="${PAR_SOURCEDIR:-$BASE_DIR/$SOURCEDIR}"
|
||||
|
||||
# The dump target folder must be writable.
|
||||
#
|
||||
[[ ! -w "$BACKUPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $BACKUPDIR" >&2 && exit 1
|
||||
|
||||
# The source folder must be readable.
|
||||
#
|
||||
[[ ! -r "$SOURCEDIR" ]] \
|
||||
&& echo "$MSG_NONREAD: $SOURCEDIR" >&2 && exit 1
|
||||
|
||||
# Tries the FS backup.
|
||||
#
|
||||
if [ -w "$BACKUPDIR" ]; then
|
||||
BACKUP_NAME=$("$BASENAME" "$SOURCEDIR").$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")
|
||||
(cd $SOURCEDIR; "$TAR" cz . \
|
||||
> "$BACKUPDIR/$BACKUP_NAME.tgz" 2>>"$BACKUPDIR/$BACKUP_NAME.log")
|
||||
fi
|
||||
|
||||
# That's all, Folks! :)
|
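A freshly written backup can be verified from the base folder right after the cron run; a small sketch with placeholder names:

```bash
# The newest tarball and its (ideally empty) log written by the script.
ls -lh storage/backups/tarballs/staticweb.*.tgz
tar tzf "$(ls -t storage/backups/tarballs/staticweb.*.tgz | head -n1)" | head
```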
.recipes/wondercms_php8/README.md (new empty file)
.recipes/wondercms_php8/docker-compose.yml (new file, 46 lines)
@ -0,0 +1,46 @@
|
||||
# WonderCMS Flat File application with official Apache 2.4.x and PHP 8.x.
|
||||
#
|
||||
# This recipe doesn't extend the official image, it keeps all necessary
|
||||
# modifications in the container. The application itself must be added
|
||||
# with a persistent volume.
|
||||
#
|
||||
# Based on https://github.com/robiso/docker-wondercms/
|
||||
#
|
||||
services:
|
||||
# https://github.com/WonderCMS/wondercms
|
||||
# https://github.com/robiso/docker-wondercms/
|
||||
# https://hub.docker.com/_/php
|
||||
wondercms:
|
||||
image: php:8-apache
|
||||
restart: unless-stopped
|
||||
# Watch out for a possible public port collision.
|
||||
ports:
|
||||
- 8201:80
|
||||
volumes:
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
# Needs R/W for UID 33 (www-data).
|
||||
- ./storage/volumes/wonder_html/:/var/www/html/
|
||||
environment:
|
||||
TZ: Europe/Budapest
|
||||
# We don't want to extend the official image to maintain
|
||||
# watchtower's monitoring for updates. So we use CMD to
|
||||
# make all the necessary changes. Unfortunately this will
|
||||
# slightly prolong the start of the service.
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
DEBIAN_FRONTEND=noninteractive apt update
|
||||
apt install -y libzip-dev zip
|
||||
apt clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
docker-php-ext-configure zip
|
||||
docker-php-ext-install zip
|
||||
a2enmod rewrite
|
||||
cp -p /usr/local/etc/php/php.ini-production /usr/local/etc/php/conf.d/php.ini
|
||||
apache2-foreground
|
||||
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
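Because the command: block installs the zip extension and enables mod_rewrite on every container start, the first start takes a little longer; whether everything came up can be checked from the host. A sketch, assuming the service name from the compose file above:

```bash
docker compose up -d wondercms
# Give the in-container apt/PHP build steps a moment, then verify.
docker compose exec wondercms php -m | grep -i zip
docker compose exec wondercms apache2ctl -M | grep -i rewrite
```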
@ -0,0 +1,8 @@
|
||||
Options -Indexes
|
||||
ServerSignature Off
|
||||
RewriteEngine on
|
||||
RewriteCond %{REQUEST_FILENAME} !-f
|
||||
RewriteCond %{REQUEST_FILENAME} !-d
|
||||
RewriteRule ^(.+)$ index.php?page=$1 [QSA,L]
|
||||
RewriteRule database.js - [F]
|
||||
RewriteRule cache.json - [F]
|
.recipes/wondercms_php8/storage/volumes/wonder_html/index.php (new file, 3068 lines; diff suppressed because it is too large)
Two binary files not shown.
@ -0,0 +1,557 @@
|
||||
@font-face {
|
||||
font-family: 'Catamaran';
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-display: swap;
|
||||
src: url('fonts/catamaran-v7-latin-ext_latin-regular.woff2') format('woff2')
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'Catamaran';
|
||||
font-style: normal;
|
||||
font-weight: 700;
|
||||
font-display: swap;
|
||||
src: url('fonts/catamaran-v7-latin-ext_latin-700.woff2') format('woff2')
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'Catamaran';
|
||||
font-style: normal;
|
||||
font-weight: 900;
|
||||
font-display: swap;
|
||||
src: url('fonts/catamaran-v7-latin-ext_latin-900.woff2') format('woff2')
|
||||
}
|
||||
|
||||
html, body, div, span, applet, object,
|
||||
iframe, h1, h2, h3, h4, h5, h6, p, blockquote,
|
||||
pre, a, abbr, acronym, address, big, cite,
|
||||
code, del, dfn, em, img, ins, kbd, q, s, samp, strike, strong, sub, sup, tt, var, b,
|
||||
u, i, center, dl, dt, dd, li, fieldset,
|
||||
form, label, legend, caption,
|
||||
tfoot, article, aside,
|
||||
canvas, details, embed, figure, figcaption,
|
||||
footer, header, hgroup, menu, nav, output, ruby,
|
||||
section, summary, time, mark, audio, video {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
font-size: 100%;
|
||||
font: inherit;
|
||||
vertical-align: baseline
|
||||
|
||||
}
|
||||
|
||||
html, body {
|
||||
box-shadow: 0 0 200px rgba(0, 0, 0, 0.27) inset;
|
||||
background-image: -webkit-linear-gradient(45deg, rgb(102, 95, 238) 0%, rgb(39, 194, 222) 100%);
|
||||
min-height: 100%;
|
||||
font-family: 'Catamaran';
|
||||
color: #fff !important
|
||||
}
|
||||
.actions li {
|
||||
list-style: none
|
||||
}
|
||||
|
||||
input::-moz-focus-inner {
|
||||
border: 0;
|
||||
padding: 0
|
||||
}
|
||||
|
||||
/* Basic */
|
||||
html {
|
||||
box-sizing: border-box
|
||||
}
|
||||
|
||||
*, *:before, *:after {
|
||||
box-sizing: inherit
|
||||
}
|
||||
|
||||
/* Type */
|
||||
body, select, textarea {
|
||||
color: rgba(255, 255, 255, 0.8);
|
||||
font-size: 16.5pt;
|
||||
font-weight: normal;
|
||||
line-height: 1.75
|
||||
}
|
||||
@media screen and (max-width: 1680px) {
|
||||
body, input, select, textarea {
|
||||
font-size: 13pt
|
||||
}
|
||||
}
|
||||
@media screen and (max-width: 1280px) {
|
||||
body, input, select, textarea {
|
||||
font-size: 12pt
|
||||
}
|
||||
}
|
||||
@media screen and (max-width: 360px) {
|
||||
body, input, select, textarea {
|
||||
font-size: 11pt
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
-moz-transition: color 0.2s ease, border-bottom-color 0.2s ease;
|
||||
-webkit-transition: color 0.2s ease, border-bottom-color 0.2s ease;
|
||||
-ms-transition: color 0.2s ease, border-bottom-color 0.2s ease;
|
||||
transition: color 0.2s ease, border-bottom-color 0.2s ease;
|
||||
border-bottom: dotted 1px rgba(255, 255, 255, 0.35);
|
||||
color: inherit;
|
||||
text-decoration: none
|
||||
}
|
||||
a:hover {
|
||||
border-bottom: solid 1px rgba(255, 255, 255, 0.88);
|
||||
color: #ffffff
|
||||
}
|
||||
|
||||
strong, b {
|
||||
color: #ffffff;
|
||||
font-weight: bold
|
||||
}
|
||||
|
||||
em, i {
|
||||
font-style: italic
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 0 0 2em 0
|
||||
}
|
||||
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
color: #ffffff;
|
||||
font-weight: bold;
|
||||
line-height: 1.5
|
||||
}
|
||||
h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {
|
||||
color: inherit;
|
||||
text-decoration: none
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2.75em
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 1.75em
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 1.1em
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: 1em
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 0.8em
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 0.6em
|
||||
}
|
||||
|
||||
@media screen and (max-width: 736px) {
|
||||
h1 {
|
||||
font-size: 3em
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 1.75em
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 1em
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: 0.8em
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 0.6em
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 0.6em
|
||||
}
|
||||
}
|
||||
|
||||
code {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 0.25em;
|
||||
border: solid 1px rgba(255, 255, 255, 0.15);
|
||||
font-family: "Courier New", monospace;
|
||||
font-size: 0.9em;
|
||||
margin: 0 0.25em;
|
||||
padding: 0.25em 0.65em
|
||||
}
|
||||
|
||||
pre {
|
||||
-webkit-overflow-scrolling: touch;
|
||||
font-family: "Courier New", monospace;
|
||||
font-size: 0.9em;
|
||||
margin: 0 0 2em 0
|
||||
}
|
||||
pre code {
|
||||
display: block;
|
||||
line-height: 1.75em;
|
||||
padding: 1em 1.5em;
|
||||
overflow-x: auto
|
||||
}
|
||||
|
||||
|
||||
.text-center {
|
||||
text-align: center
|
||||
}
|
||||
|
||||
/* Button */
|
||||
input[type="button"],
|
||||
button,
|
||||
.button {
|
||||
-moz-appearance: none;
|
||||
-webkit-appearance: none;
|
||||
-ms-appearance: none;
|
||||
appearance: none;
|
||||
-moz-transition: border-color 0.2s ease;
|
||||
-webkit-transition: border-color 0.2s ease;
|
||||
-ms-transition: border-color 0.2s ease;
|
||||
transition: border-color 0.2s ease;
|
||||
background-color: #fff;
|
||||
border: solid 1px !important;
|
||||
border-color: rgba(255, 255, 255, 0.15) !important;
|
||||
border-radius: 3em;
|
||||
color: #393939 !important;
|
||||
cursor: pointer;
|
||||
display: inline-block;
|
||||
font-size: 0.7em;
|
||||
font-weight: bold;
|
||||
letter-spacing: 0.25em;
|
||||
line-height: 4.75em;
|
||||
outline: 0;
|
||||
padding: 0 3.75em;
|
||||
position: relative;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
text-transform: uppercase;
|
||||
white-space: nowrap
|
||||
}
|
||||
input[type="button"]:after,
|
||||
button:after,
|
||||
.button:after {
|
||||
-moz-transform: scale(0.25);
|
||||
-webkit-transform: scale(0.25);
|
||||
-ms-transform: scale(0.25);
|
||||
transform: scale(0.25);
|
||||
pointer-events: none;
|
||||
-moz-transition: opacity 0.2s ease, -moz-transform 0.2s ease;
|
||||
-webkit-transition: opacity 0.2s ease, -webkit-transform 0.2s ease;
|
||||
-ms-transition: opacity 0.2s ease, -ms-transform 0.2s ease;
|
||||
transition: opacity 0.2s ease, transform 0.2s ease;
|
||||
background: #ffffff;
|
||||
border-radius: 3em;
|
||||
content: '';
|
||||
height: 100%;
|
||||
left: 0;
|
||||
opacity: 0;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
width: 100%
|
||||
}
|
||||
input[type="button"]:hover,
|
||||
button:hover,
|
||||
.button:hover {
|
||||
border-color: rgba(255, 255, 255, 0.6) !important
|
||||
}
|
||||
input[type="button"]:hover:after,
|
||||
button:hover:after,
|
||||
.button:hover:after {
|
||||
opacity: 0.05;
|
||||
-moz-transform: scale(1);
|
||||
-webkit-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
transform: scale(1)
|
||||
}
|
||||
input[type="button"]:hover:active,
|
||||
button:hover:active,
|
||||
.button:hover:active {
|
||||
border-color: #ffffff !important
|
||||
}
|
||||
input[type="button"]:hover:active:after,
|
||||
button:hover:active:after,
|
||||
.button:hover:active:after {
|
||||
opacity: 0.1
|
||||
}
|
||||
|
||||
input[type="password"] {
|
||||
border: 0;
|
||||
outline: 0;
|
||||
padding: 15px;
|
||||
border-radius: 10px;
|
||||
width: 300px
|
||||
}
|
||||
|
||||
/* Wrapper */
|
||||
.wrapper {
|
||||
position: relative
|
||||
}
|
||||
.wrapper > .inner {
|
||||
width: 100%;
|
||||
padding: 5em 4em 2em 4em
|
||||
}
|
||||
@media screen and (max-width: 1680px) {
|
||||
footer > .inner {
|
||||
padding: 2em 4em 2em 4em !important
|
||||
}
|
||||
}
|
||||
@media screen and (max-width: 736px) {
|
||||
|
||||
.wrapper > .inner {
|
||||
padding: 2em 2em 2em 2em
|
||||
}
|
||||
footer > .inner {
|
||||
padding: 2em 2em 2em 2em !important
|
||||
}
|
||||
}
|
||||
.wrapper.style2 {
|
||||
background-color: #5052b5
|
||||
}
|
||||
|
||||
.wrapper.fullscreen {
|
||||
min-height: calc(87vh - 2.5em)
|
||||
}
|
||||
@media screen and (max-width: 736px) {
|
||||
|
||||
.wrapper.fullscreen {
|
||||
min-height: calc(40vh - 5.5em)
|
||||
}
|
||||
}
|
||||
|
||||
/* Wrapper */
|
||||
#topMenu + #wrapper {
|
||||
margin-left: 0;
|
||||
position: relative
|
||||
}
|
||||
@media screen and (max-width: 736px) {
|
||||
#topMenu + #wrapper {
|
||||
padding-top: 0;
|
||||
top: 2em
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#header + #wrapper > .wrapper > .inner {
|
||||
margin: 0 auto
|
||||
}
|
||||
|
||||
/* Menu */
|
||||
#topMenu {
|
||||
padding: 0;
|
||||
background: none;
|
||||
cursor: default;
|
||||
height: 5.4em;
|
||||
left: 0;
|
||||
text-align: center;
|
||||
top: 0;
|
||||
width: 100%;
|
||||
line-height: 3.5em;
|
||||
position: relative;
|
||||
z-index: 20
|
||||
}
|
||||
|
||||
#topMenu > .inner {
|
||||
display: -moz-flex;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flex;
|
||||
display: flex;
|
||||
-moz-flex-direction: row;
|
||||
-webkit-flex-direction: row;
|
||||
-ms-flex-direction: row;
|
||||
flex-direction: row;
|
||||
-moz-justify-content: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-justify-content: center;
|
||||
justify-content: center;
|
||||
-moz-transform: translateY(0);
|
||||
-webkit-transform: translateY(0);
|
||||
-ms-transform: translateY(0);
|
||||
transform: translateY(0);
|
||||
-moz-transition: opacity 1s ease;
|
||||
-webkit-transition: opacity 1s ease;
|
||||
-ms-transition: opacity 1s ease;
|
||||
transition: opacity 1s ease;
|
||||
min-height: 100%;
|
||||
opacity: 1;
|
||||
width: 100%
|
||||
}
|
||||
#topMenu nav {
|
||||
height: inherit;
|
||||
line-height: inherit;
|
||||
margin-top: 1em
|
||||
}
|
||||
#topMenu nav ul {
|
||||
display: -moz-flex;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flex;
|
||||
display: flex;
|
||||
height: inherit;
|
||||
line-height: inherit;
|
||||
list-style: none;
|
||||
padding: 0
|
||||
}
|
||||
#topMenu nav a {
|
||||
height: inherit;
|
||||
line-height: inherit;
|
||||
padding: 0
|
||||
}
|
||||
#topMenu nav > ul > li {
|
||||
margin: 0 1em 0 1em;
|
||||
opacity: 1;
|
||||
padding: 0;
|
||||
position: relative;
|
||||
height: inherit;
|
||||
line-height: inherit
|
||||
}
|
||||
|
||||
#topMenu nav a {
|
||||
border: 0;
|
||||
font-size: 0.70em;
|
||||
font-weight: bold;
|
||||
letter-spacing: 0.25em;
|
||||
line-height: 1.75;
|
||||
outline: 0;
|
||||
padding: 2em 0;
|
||||
position: relative;
|
||||
text-decoration: none;
|
||||
text-transform: uppercase
|
||||
}
|
||||
#topMenu nav li.active, nav li.active a {
|
||||
color: #fff !important
|
||||
}
|
||||
#topMenu nav .active a{
|
||||
border-bottom: 1px solid #ffffff7d
|
||||
}
|
||||
#topMenu nav a:hover {
|
||||
border-bottom: 1px solid #ffffff59
|
||||
|
||||
}
|
||||
#topMenu nav a.active {
|
||||
color: #ffffff
|
||||
}
|
||||
#topMenu nav a.active:after {
|
||||
max-width: 100%
|
||||
}
|
||||
|
||||
@media screen and (max-width: 736px) {
|
||||
#topMenu {
|
||||
height: auto;
|
||||
font-size: 0.94em;
|
||||
position: relative;
|
||||
background-color: rgba(0, 0, 0, 0.30);
|
||||
padding-bottom: 20px
|
||||
}
|
||||
#topMenu nav ul {
|
||||
display: block;
|
||||
float: left
|
||||
}
|
||||
#topMenu nav > ul > li {
|
||||
display: block;
|
||||
float: left;
|
||||
margin: 0 1em 0 2em
|
||||
}
|
||||
#topMenu nav .active a {
|
||||
border-bottom: 1px solid #fff
|
||||
}
|
||||
footer {
|
||||
font-size: 1em
|
||||
}
|
||||
}
|
||||
|
||||
/* Intro */
|
||||
#intro p {
|
||||
font-size: 1.25em
|
||||
}
|
||||
@media screen and (max-width: 736px) {
|
||||
#intro p {
|
||||
font-size: 1em
|
||||
}
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
footer {
|
||||
text-align: right
|
||||
}
|
||||
|
||||
/* Submenus */
|
||||
.subPageDropdown a {
|
||||
border: 0 !important
|
||||
}
|
||||
|
||||
.subPageDropdown ul {
|
||||
margin: 0;
|
||||
padding-left: 0
|
||||
}
|
||||
|
||||
.subPageDropdown li {
|
||||
color: #fff;
|
||||
display: block;
|
||||
float: left;
|
||||
position: relative;
|
||||
padding: 0 1em 0 1em;
|
||||
text-decoration: none;
|
||||
transition-duration: 0.5s
|
||||
}
|
||||
|
||||
#topMenu li a {
|
||||
color: rgba(255, 255, 255, 0.8)
|
||||
}
|
||||
|
||||
#topMenu li:hover,
|
||||
#topMenu li:focus-within {
|
||||
cursor: pointer
|
||||
}
|
||||
|
||||
#topMenu li:focus-within a {
|
||||
outline: none
|
||||
}
|
||||
|
||||
#topMenu .nav-item {
|
||||
margin-top: 5px
|
||||
}
|
||||
|
||||
ul.subPageDropdown {
|
||||
visibility: hidden;
|
||||
opacity: 0;
|
||||
position: absolute;
|
||||
margin-top: 10px;
|
||||
display: none;
|
||||
padding-left: 10px !important
|
||||
}
|
||||
|
||||
#topMenu ul li:hover > ul,
|
||||
#topMenu ul li:focus-within > ul,
|
||||
#topMenu ul li ul:hover,
|
||||
#topMenu ul li ul:focus {
|
||||
visibility: visible;
|
||||
opacity: 1;
|
||||
display: block
|
||||
}
|
||||
|
||||
#topMenu ul li ul li {
|
||||
clear: both;
|
||||
text-align: left;
|
||||
background-color: rgba(0, 0, 0, 0.30);
|
||||
white-space: nowrap
|
||||
}
|
||||
|
||||
/* Submenus dropdown arrow */
|
||||
.menu li > a:after {
|
||||
content: ' ▼';
|
||||
font-weight: bold
|
||||
}
|
||||
|
||||
.menu > li > a:after {
|
||||
content: ' ▼';
|
||||
font-weight: bold
|
||||
}
|
||||
|
||||
.menu li > a:only-child:after {
|
||||
content: ''
|
||||
}
|
@ -0,0 +1,84 @@
|
||||
<?php global $Wcms ?>
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html lang="<?= $Wcms->getSiteLanguage() ?>">
|
||||
<head>
|
||||
<!-- Encoding, browser compatibility, viewport -->
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
|
||||
<!-- Search Engine Optimization (SEO) -->
|
||||
<meta name="title" content="<?= $Wcms->get('config', 'siteTitle') ?> - <?= $Wcms->page('title') ?>" />
|
||||
<meta name="description" content="<?= $Wcms->page('description') ?>">
|
||||
<meta name="keywords" content="<?= $Wcms->page('keywords') ?>">
|
||||
<meta property="og:url" content="<?= $this->url() ?>" />
|
||||
<meta property="og:type" content="website" />
|
||||
<meta property="og:site_name" content="<?= $Wcms->get('config', 'siteTitle') ?>" />
|
||||
<meta property="og:title" content="<?= $Wcms->page('title') ?>" />
|
||||
<meta name="twitter:site" content="<?= $this->url() ?>" />
|
||||
<meta name="twitter:title" content="<?= $Wcms->get('config', 'siteTitle') ?> - <?= $Wcms->page('title') ?>" />
|
||||
<meta name="twitter:description" content="<?= $Wcms->page('description') ?>" />
|
||||
|
||||
<!-- Website and page title -->
|
||||
<title>
|
||||
<?= $Wcms->get('config', 'siteTitle') ?> - <?= $Wcms->page('title') ?>
|
||||
|
||||
</title>
|
||||
|
||||
<!-- Admin CSS -->
|
||||
<?= $Wcms->css() ?>
|
||||
|
||||
<!-- Theme CSS -->
|
||||
<link rel="stylesheet" rel="preload" as="style" href="<?= $Wcms->asset('css/style.css') ?>">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<!-- Admin settings panel and alerts -->
|
||||
<?= $Wcms->settings() ?>
|
||||
|
||||
<?= $Wcms->alerts() ?>
|
||||
|
||||
<section id="topMenu">
|
||||
<div class="inner">
|
||||
<nav>
|
||||
<ul class="menu">
|
||||
<!-- Menu -->
|
||||
<?= $Wcms->menu() ?>
|
||||
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<div id="wrapper">
|
||||
<section id="intro" class="wrapper style1 fullscreen">
|
||||
<div class="inner">
|
||||
<!-- Main content for each page -->
|
||||
<?= $Wcms->page('content') ?>
|
||||
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section class="wrapper style2">
|
||||
<div class="inner">
|
||||
<!-- Static editable block, same on each page -->
|
||||
<?= $Wcms->block('subside') ?>
|
||||
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<footer class="wrapper style2">
|
||||
<div class="inner">
|
||||
<!-- Footer -->
|
||||
<?= $Wcms->footer() ?>
|
||||
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<!-- Admin JavaScript. More JS libraries can be added below -->
|
||||
<?= $Wcms->js() ?>
|
||||
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,13 @@
|
||||
{
|
||||
"version": 1,
|
||||
"themes": {
|
||||
"sky": {
|
||||
"name": "Sky",
|
||||
"repo": "https://github.com/robiso/sky/tree/master",
|
||||
"zip": "https://github.com/robiso/sky/archive/master.zip",
|
||||
"summary": "Default WonderCMS theme (2022). Theme works without Bootstrap and jQuery.",
|
||||
"version": "3.2.4",
|
||||
"image": "https://raw.githubusercontent.com/robiso/sky/master/preview.jpg"
|
||||
}
|
||||
}
|
||||
}
|
125
.recipes/wondercms_php8/tools/backup.d/storage_backup.sh
Executable file
@ -0,0 +1,125 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to backup the entire WonderCMS website.
|
||||
# Creates a tarball in $BASE_DIR/storage/backups/tarballs folder
|
||||
# (by default). An optional parameter may change the target folder.
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-03-05 v0.2
|
||||
# mod: gitbackup handling stub has been temporarily removed.
|
||||
# 2025-01-14 v0.1 Initial version.
|
||||
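#
# Example crontab entry for a Docker manager user (the schedule and paths
# below are only hypothetical illustrations):
#   15 3 * * *  PAR_BACKUPDIR=/srv/wondercms/storage/backups/tarballs \
#               /srv/wondercms/tools/backup.d/storage_backup.sh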
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_BACKUPDIR=${PAR_BACKUPDIR:-""} # Folder to dump within
|
||||
|
||||
# Messages (maybe overridden by configuration).
|
||||
#
|
||||
MSG_DOCKERGRPNEED="You must be a member of the docker group."
|
||||
MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
MSG_NOLOCATE="Cannot locate the WonderCMS container."
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
BACKUPDIR="storage/backups/tarballs" # Folder to dump within
|
||||
GITBACKUP="storage_gitbackup.sh" # Git backup utility
|
||||
SERVICENAME="wondercms" # The composed WonderCMS service
|
||||
USER=${USER:-$LOGNAME} # Fix for the cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname docker \
|
||||
find grep hostname id pwd readlink tail xargs
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
else
|
||||
# Otherwise falling back to v1.
|
||||
DOCKER_COMPOSE="$(which docker-compose)"
|
||||
commandstring=""
|
||||
fi
|
||||
# One of the two is mandatory.
|
||||
if [ -z "$DOCKER_COMPOSE" ];then echo "$MSG_MISSINGDEP docker-compose" >&2; exit 1; fi
|
||||
# Below docker-compose should be called as "$DOCKER_COMPOSE" $commandstring sequence.
|
||||
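# For example, a compose status query using this convention looks like
#   "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running"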
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Need to be root or a Docker manager user.
|
||||
#
|
||||
[[ "$USER" != 'root' ]] \
|
||||
&& [[ -z "$(echo "$("$ID" -Gn "$USER") " | "$GREP" ' docker ')" ]] \
|
||||
&& echo "$MSG_DOCKERGRPNEED" >&2 && exit 1 #"
|
||||
|
||||
# Searches the base folder, containing a docker-compose.yml file.
|
||||
#
|
||||
# Called from the base folder (./)?
|
||||
BASE_DIR="$PAR_BASEDIR"
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# On failure gives it up here.
|
||||
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute paths.
|
||||
BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
|
||||
|
||||
# The dump target folder must be writable.
|
||||
#
|
||||
[[ ! -w "$BACKUPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $BACKUPDIR" >&2 && exit 1
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Converts the service name to an actual running container's name.
|
||||
#
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$SERVICENAME") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Tries the FS backup.
|
||||
if [ -w "$BACKUPDIR" ]; then
|
||||
BACKUP_NAME=$MYCONTAINER.$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")
|
||||
"$DOCKER" exec $MYCONTAINER sh \
|
||||
-c "cd /var/www/html; tar cz ." \
|
||||
> "$BACKUPDIR/$BACKUP_NAME.tgz" 2>>"$BACKUPDIR/$BACKUP_NAME.log"
|
||||
fi
|
||||
|
||||
# That's all, Folks! :)
|
@ -1,6 +1,5 @@
|
||||
# Wordpress with MariaDB
|
||||
#
|
||||
#version: '3'
|
||||
services:
|
||||
# https://hub.docker.com/_/wordpress
|
||||
# https://github.com/docker-library/docs/tree/master/wordpress
|
||||
|
@ -1,8 +1,8 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A service script to backup the docker-composed WordPress instance.
|
||||
# Dumps the MySQL/MariaDB database to the $BASE_DIR/storage/backups/dumps
|
||||
# folder (by default). An optional parameter may change the target folder.
|
||||
# A service script to backup the docker-composed MySQL/MariaDB database.
|
||||
# Dumps database to the $BASE_DIR/storage/backups/dumps folder (by default).
|
||||
# An optional parameter may change the target folder.
|
||||
#
|
||||
# This script gets the database credentials from the docker-compose.yml file
|
||||
# and calls the mysql_dumpdb worker script which should be installed in
|
||||
@ -10,8 +10,11 @@
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-02-26 v0.3
|
||||
# mod: not tied to a particular composition (Mediawiki, Wordpress, etc).
|
||||
# 2024-12-01 v0.2.1
|
||||
# fix: typo in docker-compose version detection.
|
||||
# 2024-08-25 v0.2
|
||||
@ -22,6 +25,7 @@
|
||||
#
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_DUMPDIR=${PAR_DUMPDIR:-""} # Folder to dump within
|
||||
PAR_SERVICE=${PAR_SERVICE:-"database"} # Service's name in composition
|
||||
|
||||
# Messages (maybe overridden by configuration).
|
||||
#
|
||||
@ -111,7 +115,7 @@ DUMPDIR="${PAR_DUMPDIR:-$BASE_DIR/$DUMPDIR}"
|
||||
[[ ! -w "$DUMPDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $DUMPDIR" >&2 && exit 1
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
# The composition must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
@ -135,14 +139,14 @@ function parse { [[ -z "$1" ]] && return
|
||||
echo -e "$value"; return
|
||||
}
|
||||
# All parameters are mandatory.
|
||||
MYCONTAINER="$(parse "WORDPRESS_DB_HOST")"
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM WORDPRESS_DB_HOST" >&2; exit 1; fi
|
||||
MYDATABASE="$(parse "WORDPRESS_DB_NAME")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM WORDPRESS_DB_NAME" >&2; exit 1; fi
|
||||
MYUSER="$(parse "WORDPRESS_DB_USER")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM WORDPRESS_DB_USER" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "WORDPRESS_DB_PASSWORD")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM WORDPRESS_DB_PASSWORD" >&2; exit 1; fi
|
||||
MYCONTAINER="$PAR_SERVICE" # TODO: guess from the yml
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM PAR_SERVICE" >&2; exit 1; fi1; fi
|
||||
MYDATABASE="$(parse "MYSQL_DATABASE")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM MYSQL_DATABASE" >&2; exit 1; fi
|
||||
MYUSER="$(parse "MYSQL_USER")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM MYSQL_USER" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "MYSQL_PASSWORD")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM MYSQL_PASSWORD" >&2; exit 1; fi
|
||||
# We've the configuration parsed.
|
||||
|
||||
# Converts the database service name to an actual running container's name.
|
||||
|
@ -7,8 +7,13 @@
|
||||
#
|
||||
# Call as a Docker manager user (member of the docker Linux group) via cron.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
# 2025-05-20 v0.3
|
||||
# fix: a stupid typo in service name conversion.
|
||||
# 2025-03-05 v0.2.1
|
||||
# mod: reworded some comments and renamed a variable.
|
||||
# 2024-08-25 v0.2
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# 2021-10-19 v0.1 Initial version.
|
||||
@ -25,7 +30,7 @@ MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
MSG_NOLOCATE="Cannot locate the Mediawiki container."
|
||||
MSG_NOLOCATE="Cannot locate the service container."
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
@ -108,16 +113,16 @@ BACKUPDIR="${PAR_BACKUPDIR:-$BASE_DIR/$BACKUPDIR}"
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Converts the WordPress service name to an actual running container's name.
|
||||
# Converts the service name to an actual running container's name.
|
||||
#
|
||||
WPCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandline ps -q "$SERVICENAME") | "$CUT" -c2-)"
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$SERVICENAME") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$WPCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Tries the FS backup.
|
||||
if [ -w "$BACKUPDIR" ]; then
|
||||
BACKUP_NAME=$WPCONTAINER.$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")
|
||||
"$DOCKER" exec $WPCONTAINER sh \
|
||||
BACKUP_NAME=$MYCONTAINER.$("$DATE" '+%Y%m%d_%H%M%S').$("$HOSTNAME")
|
||||
"$DOCKER" exec $MYCONTAINER sh \
|
||||
-c "cd /var/www/html; tar cz ." \
|
||||
> "$BACKUPDIR/$BACKUP_NAME.tgz" 2>>"$BACKUPDIR/$BACKUP_NAME.log"
|
||||
fi
|
||||
|
@ -11,6 +11,11 @@
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-05-21 v0.3.1
|
||||
# fix: Wrong variable name (BASE_DIR instead of SERVICE_BASE) in a check.
|
||||
# 2025-05-20 v0.3
|
||||
# fix: The docker itself was missing from the dependency list :(.
|
||||
# fix: Wrong output redirection order (>/dev/null 2>&1 was reversed).
|
||||
# 2024-08-25 v0.2
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# mod: Doesn't do backup if the service is down.
|
||||
@ -50,7 +55,7 @@ YMLFILE="docker-compose.yml"
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in cut date dirname git readlink
|
||||
for item in cut date dirname docker git readlink
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item)
|
||||
@ -72,7 +77,7 @@ while [ -h "$SOURCE" ]; do
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
@ -132,7 +137,7 @@ fi
|
||||
|
||||
# The service must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
[[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Attempts the backup commit.
|
||||
@ -146,6 +151,7 @@ if [ ! -d "$GITDIR/.git" ]; then
|
||||
fi
|
||||
# Stages all the files and non-empty folders.
|
||||
"$GIT" --git-dir="$GITDIR/.git" --work-tree="$SOURCEDIR" add . >/dev/null
|
||||
|
||||
# Stores the file system metadata as well, if the tool has been installed.
|
||||
if [ ! -z "$(which metastore)" -a -x "$(which metastore)" ]; then
|
||||
# This command silently creates the metastore file if it doesn't exist yet.
|
||||
|
195
.recipes/wordpress_mariadb/tools/restoredb_mysql.sh
Normal file
@ -0,0 +1,195 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Restores a composed MySQL/MariaDB database from a dump file.
|
||||
# Gets all necessary data from the docker-compose.yml file.
|
||||
#
|
||||
# This is a wrapper script to the system-wide mysql_restoredb tool.
|
||||
# Database recovery with the necessary user management and grants
|
||||
# requires superuser privileges in MySQL, but simple data recovery
|
||||
# is possible if the user and privileges are already set.
|
||||
#
|
||||
# You have to call this script as a Docker manager user (member of the
|
||||
# 'docker' Linux group). The worker tool must be available somewhere
|
||||
# in PATH. At least MySQL 5.7.6 or MariaDB 10.1.3 is required.
|
||||
#
|
||||
# Usage:
|
||||
# $0 path_to_the_dumpfile [ path_to_the_service's_base ]
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
#
|
||||
# 2025-02-26 v0.1 Forked from the Smartfront repository and rewritten.
|
||||
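#
# Example invocations (the file names and paths are only hypothetical):
#   ./tools/restoredb_mysql.sh storage/backups/dumps/wordpress_db.sql.gz
# or, with an explicit service base folder and a non-default service name:
#   SERVICE=database ./tools/restoredb_mysql.sh /tmp/wordpress_db.sql.gz /srv/wordpress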
|
||||
# Accepted environment variables and their defaults.
|
||||
#
|
||||
PAR_SERVICE=${SERVICE:-"database"} # Database service's name in composition
|
||||
|
||||
# Other initialisations.
|
||||
#
|
||||
BACKUPFOLDER="storage/backups/dumps" # Skeleton's default dump folder
|
||||
PROP_DBAPASS="MYSQL_ROOT_PASSWORD" # DB admin password property
|
||||
PROP_DBNAME="MYSQL_DATABASE" # DB name property
|
||||
PROP_DBPASS="MYSQL_PASSWORD" # DB password property
|
||||
PROP_DBUSER="MYSQL_USER" # DB username property
|
||||
USER=${USER:-$LOGNAME} # Fix for the cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Basic environment settings.
|
||||
#
|
||||
LANG=C
|
||||
LC_ALL=C
|
||||
|
||||
# Messages.
|
||||
#
|
||||
MSG_BADDUMP="Fatal: doesn't exist or doesn't a dumpfile:"
|
||||
MSG_DOCKERGRPNEED="You must be a member of the docker group."
|
||||
MSG_DOESNOTRUN="This service doesn't run."
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGCONF="Fatal: missing config file"
|
||||
MSG_MISSINGYML="Fatal: didn't find the $YMLFILE file"
|
||||
MSG_NOLOCATE="Cannot locate the database container."
|
||||
MSG_NOPARAM="Missing environment parameter"
|
||||
|
||||
MSG_USAGE="Usage: $0 dump_pathname [ composition_base_pathname ]\n"
|
||||
MSG_USAGE+="ENVVAR:\n"
|
||||
MSG_USAGE+="SERVICE \tDatabase service's name in composition\n"
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in basename cat cut date dirname docker \
|
||||
grep id mysql_restoredb readlink tail xargs
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
else
|
||||
# Otherwise falling back to v1.
|
||||
DOCKER_COMPOSE="$(which docker-compose)"
|
||||
commandstring=""
|
||||
fi
|
||||
# One of the two is mandatory.
|
||||
if [ -z "$DOCKER_COMPOSE" ];then echo "$MSG_MISSINGDEP docker-compose" >&2; exit 1; fi
|
||||
# Below docker-compose should be called as "$DOCKER_COMPOSE" $commandstring sequence.
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && echo "$PWD" )" #"
|
||||
|
||||
# Need to be root or a Docker manager user.
|
||||
#
|
||||
[[ "$USER" != 'root' ]] \
|
||||
&& [[ -z "$(echo "$("$ID" -Gn "$USER") " | "$GREP" ' docker ')" ]] \
|
||||
&& echo "$MSG_DOCKERGRPNEED" >&2 && exit 1 #"
|
||||
|
||||
# Gets the command line parameters.
|
||||
#
|
||||
# DUMPFILE is mandatory
|
||||
if [ -n "$1" ]; then DUMPFILE="$1"; shift
|
||||
else echo -e "$MSG_USAGE" >&2; exit 1; fi
|
||||
# SERVICE_BASE is optional
|
||||
if [ -n "$1" ]; then SERVICE_BASE="$1"; shift; fi
|
||||
# We've read the unchecked command line parameters.
|
||||
|
||||
# Searches the base folder, containing the YMLFILE.
|
||||
#
|
||||
if [ -z "$SERVICE_BASE" ]; then
|
||||
# Called from the base folder (./)?
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$SERVICE_BASE" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && SERVICE_BASE="$TEST_DIR"
|
||||
fi
|
||||
# On failure gives it up here.
|
||||
if [ -z "$SERVICE_BASE" -o ! -r "$SERVICE_BASE/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute path.
|
||||
YMLFILE="$SERVICE_BASE/$YMLFILE"
|
||||
# We've the YMLFILE.
|
||||
|
||||
# Finds the DUMPFILE to use.
|
||||
#
|
||||
# The DUMPFILE must point to a readable file.
|
||||
# If doesn't it tries the skeleton's standard backup folder as well.
|
||||
if [ ! -r "$DUMPFILE" ]
|
||||
then DUMPFILE="$("$DIRNAME" "$SERVICE_BASE")/$BACKUPFOLDER/$DUMPFILE"; fi
|
||||
# If it is an existing symlink dereferences it to ensure, it points to a file.
|
||||
if [ -h "$DUMPFILE" ]; then
|
||||
if [[ "$("$READLINK" "$DUMPFILE")" != /* ]]
|
||||
# relative path in symlink
|
||||
then DUMPFILE="$("$DIRNAME" "$DUMPFILE")/$("$READLINK" "$DUMPFILE")"
|
||||
# absolute path in symlink
|
||||
else DUMPFILE="$("$READLINK" "$DUMPFILE")"; fi
|
||||
fi
|
||||
# Let's check it!
|
||||
if [ ! -r "$DUMPFILE" -o ! -f "$DUMPFILE" ]
|
||||
then echo -e "$MSG_BADDUMP $DUMPFILE"; exit 1; fi
|
||||
# We've an existing dumpfile.
|
||||
|
||||
# The composition must be running - silently gives up here if not.
|
||||
#
|
||||
[[ -z "$(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 1
|
||||
|
||||
# Parses the YMLFILE for parameters to use.
|
||||
#
|
||||
function parse { [[ -z "$1" ]] && return
|
||||
# Gets the live lines containing the parameter.
|
||||
value=$("$CAT" "$YMLFILE" | "$GREP" -ve '^#' | \
|
||||
"$GREP" -e "^ *$1" | "$TR" -d '\r')
|
||||
# If there are multiple matches, the last one is considered.
|
||||
value=$(echo -e "$value" | "$TAIL" -n1)
|
||||
# Right side of the colon W/O leading and trailing spaces and quotes.
|
||||
value=$(echo -ne "$value" | "$CUT" -d':' -f2 | "$XARGS")
|
||||
# Removes the trailing semicolon (if any).
|
||||
value=${value%;*}
|
||||
echo -e "$value"; return
|
||||
}
|
||||
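# For illustration, given a hypothetical docker-compose.yml line such as
#   MYSQL_DATABASE: "wordpress"
# parse "MYSQL_DATABASE" echoes: wordpress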
# These parameters are mandatory.
|
||||
MYCONTAINER="$PAR_SERVICE" # TODO: guess from the yml
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOPARAM PAR_SERVICE" >&2; exit 1; fi1; fi
|
||||
MYDATABASE="$(parse "$PROP_DBNAME")"
|
||||
if [ -z "$MYDATABASE" ]; then echo "$MSG_NOPARAM $PROP_DBNAME" >&2; exit 1; fi
|
||||
MYUSER="$(parse "$PROP_DBUSER")"
|
||||
if [ -z "$MYUSER" ]; then echo "$MSG_NOPARAM $PROP_DBUSER" >&2; exit 1; fi
|
||||
MYPASSWORD="$(parse "$PROP_DBPASS")"
|
||||
if [ -z "$MYPASSWORD" ]; then echo "$MSG_NOPARAM $PROP_DBPASS" >&2; exit 1; fi
|
||||
# These are optional.
|
||||
MYDBAUSER="root"
|
||||
MYDBAPASSWORD="$(parse "$PROP_DBAPASS")"
|
||||
# We've the configuration parsed.
|
||||
|
||||
# Converts the database service name to an actual running container's name.
|
||||
#
|
||||
MYCONTAINER="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$SERVICE_BASE"; "$DOCKER_COMPOSE" $commandstring ps -q "$MYCONTAINER") | "$CUT" -c2-)"
|
||||
# Gives up here if failed.
|
||||
if [ -z "$MYCONTAINER" ]; then echo "$MSG_NOLOCATE" >&2; exit 1; fi
|
||||
|
||||
# Calls the worker script to make the job.
|
||||
#
|
||||
export MYDBAUSER MYDBAPASSWORD MYPASSWORD
|
||||
"$MYSQL_RESTOREDB" -C "$MYCONTAINER" -U "$MYUSER" "$MYDATABASE" "$DUMPFILE"
|
||||
|
||||
# That's all, Folks! :)
|
7
.templates/.ports
Normal file
@ -0,0 +1,7 @@
|
||||
# This is a human-readable summary of the port allocation.
|
||||
# To be maintained manually. You can use it as you like,
|
||||
# it doesn't matter for docker-skeleton automations.
|
||||
|
||||
8100 ACME for SSL certificates
|
||||
|
||||
8201
|
@ -21,9 +21,13 @@
|
||||
# [-C container] [-d database] [-f dumpfile ]
|
||||
# [database (if not in -d)] [dumpfile (if not in -f)]
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-03-04 v1.1
|
||||
# new: Works with dockerized databases but hasn't yet been tested with natives.
|
||||
# mod: Database user creation and grants rewritten. Now create user @'%'
|
||||
# (instead of @'myhost') if it doesn't already exist.
|
||||
# 2023-06-18 v1.0
|
||||
# new: forked from the "SMARTERP_skeleton" repository.
|
||||
# 2022-04-07 v0.4
|
||||
@ -46,14 +50,10 @@ MYDBAUSER=${MYDBAUSER:-""} # Database admin superuser
|
||||
MYDBAPASSWORD=${MYDBAPASSWORD:-""} # Credential for the DBA user
|
||||
MYDUMP=${MYDUMP-""} # Dump file pathname
|
||||
MYHOST=${MYHOST:-"localhost"} # Connection parameter
|
||||
MYOPTIONS=${MYOPTIONS-""} # Options to pass to pg_dump
|
||||
MYPASSWORD=${MYPASSWORD-""} # Credential for the DB owner
|
||||
MYPORT=${MYPORT:-"3306"} # Connection parameter
|
||||
MYUSER=${MYUSER:-"root"} # Owner of the restored DB
|
||||
|
||||
### Temporarily ignored! Need to sanitize.
|
||||
MYOPTIONS=""
|
||||
|
||||
# Basic environment settings.
|
||||
#
|
||||
LANG=C
|
||||
@ -133,14 +133,21 @@ do
|
||||
;;
|
||||
esac
|
||||
done; shift $((OPTIND -1))
|
||||
#
|
||||
# All options have been processed.
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
# Conditional dependencies (according to native or dockerized environment).
|
||||
[[ -z "$MYCONTAINER" ]] \
|
||||
&& additem="mysql" \
|
||||
|| additem="docker"
|
||||
if [ -n "$MYCONTAINER" ]; then
|
||||
# Dockerized
|
||||
additem="docker"
|
||||
else
|
||||
# Native - MySQL or MariaDB CLI?
|
||||
if [ -n "$(which mysql)" ]
|
||||
then additem="mysql"
|
||||
else additem="mariadb"; fi
|
||||
fi
|
||||
# Common dependencies.
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
@ -151,6 +158,12 @@ do
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
#
|
||||
# Unifies the call of the clients in a native environment.
|
||||
if [ -z "$MYCONTAINER" ]; then
|
||||
if [ -z "$MYSQL" -a -n "$MARIADB" ]; then MYSQL="$MARIADB"; fi
|
||||
fi
|
||||
#
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
|
||||
# Sanitizing the parameters.
|
||||
@ -184,6 +197,7 @@ done
|
||||
#
|
||||
[[ -n "$MYUSER" ]] && [[ ! "$MYUSER" =~ ^([[:alnum:]]|[.-_\\+])*$ ]] \
|
||||
&& echo -e "$MSG_BADPARAM $MYUSER\n$MSG_USAGE" >&2 && exit 1
|
||||
#
|
||||
# We've at least a minimally checked parameters.
|
||||
|
||||
# Need to be root or a Docker manager user if the DB runs in a container.
|
||||
@ -213,6 +227,7 @@ for veto in $vetodatabases ""
|
||||
do
|
||||
[[ "$MYDATABASE" = "$veto" ]] && exit 0
|
||||
done
|
||||
#
|
||||
# We've a database name to restore.
|
||||
|
||||
# Determines the dumpfile.
|
||||
@ -234,6 +249,7 @@ fi
|
||||
# Let's check it!
|
||||
if [ ! -r "$MYDUMPFILE" -o ! -f "$MYDUMPFILE" ]
|
||||
then echo -e "$MSG_BADDUMP $MYDUMPFILE"; exit 1; fi
|
||||
#
|
||||
# We've an existing dumpfile.
|
||||
|
||||
# Tries to get the locale settings (actually CHARACTER SET) of this dump.
|
||||
@ -252,6 +268,7 @@ if [ -z "$MYCHARSET" ]; then
|
||||
# Trims the character set's name itself (the first word after the equal sign).
|
||||
[[ -n "$MYCHARSET" ]] && MYCHARSET=$(echo -e "$MYCHARSET" | "$SED" 's/^.*= \(.*\) .*$/\1/') #'
|
||||
fi
|
||||
#
|
||||
# We've a raw guess about the character sets used.
|
||||
|
||||
# Finds the LOGFILE to use.
|
||||
@ -262,6 +279,7 @@ fi
|
||||
&& LOGFILE="${MYDUMPFILE%.gz}" \
|
||||
&& LOGFILE="${LOGFILE%.*}.log" \
|
||||
|| LOGFILE="/dev/null"
|
||||
#
|
||||
# We've a suitable logfile.
|
||||
|
||||
# Opens the log and takes care to close it when finish.
|
||||
@ -274,35 +292,64 @@ function close_log() {
|
||||
"$TEE" -a "$LOGFILE"
|
||||
}
|
||||
trap -- 'close_log' EXIT
|
||||
#
|
||||
# We started logging.
|
||||
|
||||
# Prepopulates the SQL command skeleton (macro).
|
||||
# Prepopulates two SQL command skeletons (macros).
|
||||
#
|
||||
# This skeleton makes the SQL calls independent to the environment
|
||||
# (native or dockerized) and credentials. We need only actualize the
|
||||
# CONNECT, DATABASE and SQLVERB clauses then eval $DO_SQLVERB.
|
||||
# Warning: the parameters must had been sanitized!
|
||||
DO_SQLVERB=""
|
||||
DO_SQLVERB+="export MYSQL_PWD; "
|
||||
DO_SQLVERB+="\"\$MYSQL\" \$CONNECT -N \$DATABASE "
|
||||
DO_SQLVERB+="-e \"\$SQLVERB\""
|
||||
# We've a suitable SQL macro.
|
||||
# CONNECT, DATABASE and SQLVERB clauses then eval $DO_SQLSTREAM or
|
||||
# $DO_SQLVERB. Warning: the parameters must be sanitized!
|
||||
#
|
||||
if [ -n "$MYCONTAINER" ]; then
|
||||
# When MySQL runs in the container.
|
||||
#
|
||||
if [ -n "$("$DOCKER" exec $MYCONTAINER which mysql)" ]; then
|
||||
DO_SQLVERB="\"\$DOCKER\" exec -e MYSQL_PWD=\"\$MYSQL_PWD\" \$MYCONTAINER mysql "
|
||||
# When MariaDB runs in the container.
|
||||
elif [ -n "$("$DOCKER" exec $MYCONTAINER which mariadb)" ]; then
|
||||
DO_SQLVERB="\"\$DOCKER\" exec -e MYSQL_PWD=\"\$MYSQL_PWD\" \$MYCONTAINER mariadb "
|
||||
DO_SQLSTREAM="\"\$DOCKER\" exec -i -e MYSQL_PWD=\"\$MYSQL_PWD\" \$MYCONTAINER /bin/bash -c \"mariadb "
|
||||
# Otherwise gives it up here.
|
||||
else
|
||||
echo -e "$MSG_BADDBTYPE in $MYCONTAINER." | "$TEE" -a "$LOGFILE" >&2
|
||||
echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2
|
||||
exit 1
|
||||
fi
|
||||
# Common parameters.
|
||||
DO_SQLVERB+="\$CONNECT -sN --ssl-verify-server-cert=false \$DATABASE "
|
||||
DO_SQLVERB+="-e \"\$SQLVERB\""
|
||||
DO_SQLSTREAM+="\$CONNECT -sN --ssl-verify-server-cert=false \$DATABASE \""
|
||||
else
|
||||
# Native environment.
|
||||
#
|
||||
DO_SQLVERB="export \"MYSQL_PWD\"; \"\$MYSQL\" "
|
||||
DO_SQLVERB+="\$CONNECT -sN --ssl-verify-server-cert=false \$DATABASE "
|
||||
DO_SQLVERB+="-e \"\$SQLVERB\""
|
||||
DO_SQLSTREAM="$DO_SQLVERB"
|
||||
fi
|
||||
#
|
||||
# We've two suitable SQL macros.
|
||||
|
||||
# Do we connect the database as a DBA?
|
||||
# Are we able to connect to the database, preferably as a DBA?
|
||||
#
|
||||
SQLVERB="SELECT 1;"
|
||||
result=""
|
||||
# Sets the default DBA username for dockerized and native RDBMS as well.
|
||||
if [ -z "$MYDBAUSER" ]; then
|
||||
[[ -n "$MYCONTAINER" ]] \
|
||||
&& MYDBAUSER="root" \
|
||||
|| MYDBAUSER="$USER"
|
||||
|| MYDBAUSER="root"
|
||||
fi
|
||||
# In a native environment we'll try the local connection
|
||||
# (Unix-domain socket) first.
|
||||
if [ -z "$MYCONTAINER" ]; then
|
||||
CONNECT=""
|
||||
DATABASE=""
|
||||
result=$(eval "$DO_SQLVERB" 2>/dev/null); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
fi
|
||||
#
|
||||
# We'll try the local connection (Unix-domain socket) first.
|
||||
CONNECT=""
|
||||
DATABASE=""
|
||||
#result=$(eval "$DO_SQLVERB" 2>/dev/null); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
if [ "$result" != "1" ]; then
|
||||
#
|
||||
# On failure we'll try the TCP connection.
|
||||
@ -327,10 +374,12 @@ if [ "$result" != "1" ]; then
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
#
|
||||
# We've valid MYSQL_PWD and CONNECT clauses.
|
||||
|
||||
# Checks the superuser privilege.
|
||||
# Better check: TODO!
|
||||
#
|
||||
ISDBA=false
|
||||
DATABASE=""
|
||||
SQLVERB="SHOW GRANTS;"
|
||||
@ -343,10 +392,12 @@ else
|
||||
echo -e "$MSG_NONSUPER" | "$TEE" -a "$LOGFILE" >&2
|
||||
echo -e "$MSG_NONBLOCKING" | "$TEE" -a "$LOGFILE" >&2
|
||||
fi
|
||||
#
|
||||
# We know we're a DB superuser or not.
|
||||
|
||||
# Following steps need the superuser privileges.
|
||||
# Lack of this we'll skip them.
|
||||
#
|
||||
if $ISDBA; then
|
||||
DATABASE="mysql"
|
||||
|
||||
@ -379,41 +430,57 @@ if $ISDBA; then
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
fi
|
||||
#
|
||||
# RDBMS version is proper.
|
||||
|
||||
# Creates the database user (owner) if it doesn't exist.
|
||||
# Database user revision.
|
||||
#
|
||||
echo -e "CREATE USER" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB=" CREATE USER '$MYUSER'@'$MYHOST'; "
|
||||
# If '$MYUSER'@'$MYHOST' exists, it will provide the necessary privileges.
|
||||
# If '$MYUSER'@'%' doesn't exist, it will create it then provide the
|
||||
# necessary privileges.
|
||||
#
|
||||
# Checks '$MYUSER'@'$MYHOST'
|
||||
SQLVERB="SELECT COUNT(1) FROM mysql.user WHERE user = '$MYUSER' AND host = '$MYHOST'; "
|
||||
result=$(eval "$DO_SQLVERB" 2> >("$TEE" -a "$LOGFILE" >&2)); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
if [[ $excode -ne 0 ]]; then
|
||||
# Already exists (or something went wrong).
|
||||
echo -e "$MSG_FAILUSER $MYUSER@$MYHOST" | "$TEE" -a "$LOGFILE" >&2
|
||||
echo -e "$MSG_NONBLOCKING" | "$TEE" -a "$LOGFILE" >&2
|
||||
else
|
||||
# Sets the password only if the user has just created.
|
||||
echo -e "SET PASSWORD" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB="SET PASSWORD FOR '$MYUSER'@'$MYHOST' = '$MYPASSWORD'; "
|
||||
if [[ $excode -eq 0 && $result -eq 1 ]]; then
|
||||
# It exists, let's give it privileges.
|
||||
echo -e "GRANT" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB="GRANT ALL PRIVILEGES ON $MYDATABASE.* TO '$MYUSER'@'$MYHOST'; "
|
||||
result=$(eval "$DO_SQLVERB" 2> >("$TEE" -a "$LOGFILE" >&2)); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_FAILPASS $MYUSER@$MYHOST" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& echo -e "$MSG_FAILGRANT $MYUSER@$MYHOST" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
fi
|
||||
#
|
||||
# Grants all privileges on the database to the user.
|
||||
#
|
||||
echo -e "GRANT" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB="GRANT ALL PRIVILEGES ON $MYDATABASE.* TO '$MYUSER'@'$MYHOST'; "
|
||||
# Checks '$MYUSER'@'%' as well.
|
||||
SQLVERB="SELECT COUNT(1) FROM mysql.user WHERE user = '$MYUSER' AND host = '%'; "
|
||||
result=$(eval "$DO_SQLVERB" 2> >("$TEE" -a "$LOGFILE" >&2)); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
if [[ $excode -eq 0 && $result -ne 1 ]]; then
|
||||
# Creates if it doesn't exist yet.
|
||||
echo -e "CREATE USER %" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB="CREATE USER '$MYUSER'@'%' IDENTIFIED BY '$MYPASSWORD'; "
|
||||
result=$(eval "$DO_SQLVERB" 2> >("$TEE" -a "$LOGFILE" >&2)); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
# Gives it up here if something went wrong.
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_FAILUSER $MYUSER@%" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
fi
|
||||
# Let's give it privileges.
|
||||
echo -e "GRANT %" | "$TEE" -a "$LOGFILE"
|
||||
SQLVERB="GRANT ALL PRIVILEGES ON $MYDATABASE.* TO '$MYUSER'@'%'; "
|
||||
result=$(eval "$DO_SQLVERB" 2> >("$TEE" -a "$LOGFILE" >&2)); excode=$?
|
||||
result="${result//[[:space:]]/}"
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_FAILGRANT $MYUSER@$MYHOST" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& echo -e "$MSG_FAILGRANT $MYUSER@%" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
# We've the database user with the proper password.
|
||||
#
|
||||
# We've the database user(s) with the proper grants.
|
||||
|
||||
# Drops all existing connections to the database being restored.
|
||||
#
|
||||
@ -438,8 +505,10 @@ if $ISDBA; then
|
||||
fi
|
||||
done
|
||||
fi
|
||||
#
|
||||
# Connections have been eliminated (we hope).
|
||||
fi
|
||||
#
|
||||
# Done with the superuser part.
|
||||
|
||||
# Drops the database.
|
||||
@ -452,7 +521,7 @@ result="${result//[[:space:]]/}"
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
|
||||
#
|
||||
# Recreates the database.
|
||||
#
|
||||
echo -e "CREATE DATABASE" | "$TEE" -a "$LOGFILE"
|
||||
@ -463,6 +532,7 @@ result="${result//[[:space:]]/}"
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_BLOCKING" | "$TEE" -a "$LOGFILE" >&2 \
|
||||
&& exit 1
|
||||
#
|
||||
# We've an empty database.
|
||||
|
||||
# Sets the default character set.
|
||||
@ -485,13 +555,15 @@ if [ -n "$MYCHARSET" ]; then
|
||||
&& exit 1
|
||||
fi
|
||||
fi
|
||||
#
|
||||
# We've the character set adjusted.
|
||||
|
||||
# Restores the database from the dump.
|
||||
#
|
||||
# This isn't so straightforward as in PostgreSQL.
|
||||
# This isn't as straightforward as in, e.g., PostgreSQL.
|
||||
# We'll use the database user's credentials, not the superuser's
|
||||
# to mitigate the effect of an unsanitized dump.
|
||||
#
|
||||
echo -e "RESTORE" | "$TEE" -a "$LOGFILE"
|
||||
# Let's identify the file is gzipped or not.
|
||||
UNPACKER=$("$FILE" --mime-type "$MYDUMPFILE")
|
||||
@ -501,24 +573,21 @@ UNPACKER=${UNPACKER##* } # The last word is the MIME-type.
|
||||
&& UNPACKER="$GUNZIP" \
|
||||
|| UNPACKER="$CAT"
|
||||
# This is a sed expression to modify the security definers within the dump.
|
||||
MOD_DEFINER="s/DEFINER=.*@[^ ]*/DEFINER=\`$MYUSER\`@\`$MYHOST\`/"
|
||||
# Considers the RDBMS environment.
|
||||
if [ -n "$MYCONTAINER" ]; then
|
||||
# Dockerized RDBMS.
|
||||
echo "MySQL dockerized - TODO!" | "$TEE" -a "$LOGFILE" >&2
|
||||
else
|
||||
# Native RDBMS.
|
||||
# Reads the dump, on the fly unpacks it and modifies the scurity definer,
|
||||
# then passes the data stream to the MySQL client.
|
||||
"$CAT" "$MYDUMPFILE" | "$UNPACKER" | "$SED" "$MOD_DEFINER" | \
|
||||
"$MYSQL" -u "$MYUSER" -p$MYPASSWORD -h "$MYHOST" -P "$MYPORT" \
|
||||
-f -D "$MYDATABASE" \
|
||||
>/dev/null 2> >("$TEE" -a "$LOGFILE" >&2); excode=$?
|
||||
# Unfortunately the result code doesn't differentiate the
|
||||
# blocking and non-blocking states.
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_NONZERO: $excode" | "$TEE" -a "$LOGFILE" >&2
|
||||
fi
|
||||
MOD_DEFINER="s/DEFINER=.*@[^ ]*/DEFINER=CURRENT_USER/"
|
||||
#
|
||||
# We'll read the dump, on the fly unpack it and modify the security definer,
|
||||
# then we'll pass the data stream to the MySQL client.
|
||||
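# For illustration, on a hypothetical dump line the expression above rewrites
#   CREATE DEFINER=`olduser`@`localhost` PROCEDURE demo() ...
# as
#   CREATE DEFINER=CURRENT_USER PROCEDURE demo() ...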
#
|
||||
DATABASE="$MYDATABASE"
|
||||
SQLVERB=""
|
||||
(eval "$DO_SQLSTREAM") \
|
||||
< <("$CAT" "$MYDUMPFILE" | "$UNPACKER" | "$SED" "$MOD_DEFINER") \
|
||||
>/dev/null 2> >("$TEE" -a "$LOGFILE" >&2); excode=$?
|
||||
# Unfortunately the result code doesn't differentiate the
|
||||
# blocking and non-blocking states.
|
||||
[[ $excode -ne 0 ]] \
|
||||
&& echo -e "$MSG_NONZERO: $excode" | "$TEE" -a "$LOGFILE" >&2
|
||||
#
|
||||
# We had a try to restore the database - the result isn't properly defined.
|
||||
|
||||
# Closing log entry will be handled via EXIT trap.
|
||||
|
@ -55,6 +55,9 @@
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-03-06 v1.1
|
||||
# new: added the --noconf option which prevents reading and creating
|
||||
# the configuration file.
|
||||
# 2023-06-18 v1.0
|
||||
# new: forked from the "SMARTERP_skeleton" repository.
|
||||
# 2021.02.12 v0.3
|
||||
@ -77,6 +80,7 @@ RETAIN_MONTHS=${RETAIN_MONTHS-"12"}
|
||||
# Other initialisations (maybe overridden by configuration).
|
||||
#
|
||||
DOIT=""
|
||||
NOCONF=""
|
||||
|
||||
# Messages (maybe overriden by configuration).
|
||||
#
|
||||
@ -106,6 +110,7 @@ do
|
||||
case ${option} in
|
||||
"-" )
|
||||
if [ "$OPTARG" = "doit" ]; then DOIT="yes"
|
||||
elif [ "$OPTARG" = "noconf" ]; then NOCONF="yes"
|
||||
else echo "$MSG_BADOPT --$OPTARG" >&2; exit 1
|
||||
fi
|
||||
;;
|
||||
@ -135,9 +140,10 @@ BACKUP_FOLDER=${BACKUP_FOLDER%/}
|
||||
if [ -z "$BACKUP_FOLDER" -o ! -d "$BACKUP_FOLDER" -o ! -w "$BACKUP_FOLDER" ]
|
||||
then echo -e "$MSG_BADFOLDER $BACKUP_FOLDER" >&2; exit 1; fi
|
||||
|
||||
# Gets the configuration (if any).
|
||||
# Applies the configuration (if it exists and isn't to be ignored).
|
||||
BACKUP_CONF="$BACKUP_FOLDER/.$("$BASENAME" "$0").conf"
|
||||
if [ -r $BACKUP_CONF ]; then . "$BACKUP_CONF"
|
||||
if [ "$NOCONF" = "yes" ]; then :
|
||||
elif [ -r $BACKUP_CONF ]; then . "$BACKUP_CONF"
|
||||
else
|
||||
# Warns about failure.
|
||||
echo -e "$MSG_NOCONF $BACKUP_CONF"
|
||||
|
@ -67,24 +67,15 @@ server {
|
||||
You may report this at <a href='mailto:$server_admin'>$server_admin</a>.</span>";
|
||||
}
|
||||
#
|
||||
# Static service.
|
||||
# location / {
|
||||
# root $PAR_SERVICE/storage/volumes/staticweb/;
|
||||
# allow all;
|
||||
# autoindex off;
|
||||
# index index.html index.htm;
|
||||
# try_files $ri $uri/ =404;
|
||||
# }
|
||||
# Optional export backups function.
|
||||
# Needs ACLs, see the include file.
|
||||
#include $PAR_SERVICE/configs/nginx_xport.inc;
|
||||
#
|
||||
# No static service, but we may provide the OPTIONS for a potential DAV client.
|
||||
# location / {
|
||||
# types { } default_type text/html;
|
||||
## dav_ext_methods OPTIONS;
|
||||
# if ($request_method != OPTIONS) {
|
||||
# return 404 "<span style='font-size: x-large'>Sorry try <a href='$scheme://$server_name/$PAR_LOCATION'>$scheme://$server_name/$PAR_LOCATION</a> instead.</span>";
|
||||
## return 302 $scheme://$server_name/$PAR_LOCATION;
|
||||
# }
|
||||
# }
|
||||
# Optional simple static service.
|
||||
#include $PAR_SERVICE/configs/nginx_static.inc;
|
||||
#
|
||||
# Optional simple disabled-static service.
|
||||
#include $PAR_SERVICE/configs/nginx_nostatic.inc;
|
||||
|
||||
##################################################################################
|
||||
# The SSL part
|
||||
|
11
.templates/nginx/nginx_nostatic.inc
Normal file
@ -0,0 +1,11 @@
|
||||
# Includable nginx configuration.
|
||||
#
|
||||
# A simple no-static service.
|
||||
location / {
|
||||
types { } default_type text/html;
|
||||
# dav_ext_methods OPTIONS;
|
||||
if ($request_method != OPTIONS) {
|
||||
return 404 "<span style='font-size: x-large'>Sorry try <a href='$scheme://$server_name/$PAR_LOCATION'>$scheme://$server_name/$PAR_LOCATION</a> instead.</span>";
|
||||
# return 302 $scheme://$server_name/$PAR_LOCATION;
|
||||
}
|
||||
}
|
10
.templates/nginx/nginx_static.inc
Normal file
@ -0,0 +1,10 @@
|
||||
# Includable nginx configuration.
|
||||
#
|
||||
# A simple static service.
|
||||
location / {
|
||||
root $PAR_SERVICE/storage/volumes/staticweb/;
|
||||
allow all;
|
||||
autoindex off;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
20
.templates/nginx/nginx_xport.inc
Normal file
@ -0,0 +1,20 @@
|
||||
# Includable nginx configuration.
|
||||
#
|
||||
# Export backups feature.
|
||||
# Needs
|
||||
# setfacl -m u:www-data:r configs/xport_backup
|
||||
# chmod a-x configs/xport_backup
|
||||
# setfacl -m u:www-data:rx storage/backups
|
||||
# setfacl -m u:www-data:rx storage/backups/export
|
||||
# setfacl -d -m u:www-data:r storage/backups/export
|
||||
# ACLs.
|
||||
location /export {
|
||||
root $PAR_SERVICE/storage/backups;
|
||||
auth_basic "Export backups area";
|
||||
auth_basic_user_file $PAR_SERVICE/configs/xport_backup;
|
||||
allow all;
|
||||
autoindex on;
|
||||
autoindex_exact_size off;
|
||||
autoindex_format html;
|
||||
autoindex_localtime on;
|
||||
}
|
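# The password file referenced by auth_basic_user_file must exist; it can be
# created, for example, with the htpasswd tool from apache2-utils
# (the user name below is only an example):
#   htpasswd -c configs/xport_backup backupuser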
229
.utils/downsync/downsync
Executable file
@ -0,0 +1,229 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# A humble shell script for one-way (down) synchronization of a remote
|
||||
# web folder (e.g. created by the xbackup utility). Performs the basic
|
||||
# authentication if it is necessary.
|
||||
#
|
||||
# It does not handle any remote subfolders, only the root folder. Downloads
|
||||
# all files that do not exist locally. Updates only an existing file that is
|
||||
# older than the remote source. It warns of errors or possible inconsistencies.
|
||||
#
|
||||
# Actually it is only a pretty fatty wrapper to the wget :).
|
||||
# Creates a unique log file in the local folder (this can be disabled).
|
||||
#
|
||||
# Usage: $0 [ -u remote_username ] [ -p base64_encoded_password ]
|
||||
# [ -m max_tries ] [ -w wait_seconds ] [ -s small_size_warn ]
|
||||
# [ --nolog ] [ --info | --verbose ]
|
||||
# remote_URI [ local_folder ]
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-03-21 v0.1 Initial release.
|
||||
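#
# Example invocation (hostname, credentials and folders are only hypothetical):
#   downsync -u backup -p UzNjcjN0IQ== -m 5 -w 10 \
#            https://backups.example.com/export/ /srv/mirror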
|
||||
# Messages.
|
||||
#
|
||||
MSG_BADEXIT="The worker finished with an exit code:"
|
||||
MSG_BADOPT="Invalid option"
|
||||
MSG_BADLOCAL="Must be an existing writable folder:"
|
||||
MSG_BADPARAM="Doubtful parameter:"
|
||||
MSG_BADPASS="Password must be base64-encoded:"
|
||||
MSG_BADURI="Must be a valid http(s) address:"
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGLOCAL="Please specify the local folder."
|
||||
MSG_MISSINGURI="Remote URI is mandatory."
|
||||
MSG_SMALLFILES="List of the downloaded files shorter than"
|
||||
|
||||
MSG_USAGE="Usage: $0 [ -u remote_usename ] [ -p base64_encoded_password ] "
|
||||
MSG_USAGE+="[ -m max_tries ] [ -w wait_seconds ] [ -s small_size_warn ] "
|
||||
MSG_USAGE+="[ --nolog ] [ --info | --verbose ] "
|
||||
MSG_USAGE+=" remote_URI [ local_folder ]"
|
||||
|
||||
# Basic environment settings.
|
||||
#
|
||||
LANG=C
|
||||
LC_ALL=C
|
||||
|
||||
# Initialisations.
|
||||
#
|
||||
LOGSTAMP="\"\$DATE\" +%Y%m%d-%H%M%S" # Timestamp format for logfile
|
||||
NEWFILEMINS=60 # A file younger than this is "new"
|
||||
SHORTFILEEX="\(\.log\|\.tmp\)$" # Exceptions to short file checking
|
||||
#
|
||||
WGET_OPTIONS="-e robots=off --no-parent --no-directories "
|
||||
WGET_OPTIONS+="--recursive --level=1 --exclude-directories='*' --reject index.htm* "
|
||||
WGET_OPTIONS+="--timestamping --continue "
|
||||
WGET_OPTIONS+="--no-verbose "
|
||||
|
||||
# Default parameters.
|
||||
#
|
||||
MAXTRIES=3 # On error it will try to download a file
|
||||
# at most this many times.
|
||||
NOLOG="" # If not empty, it will not write log file.
|
||||
SMALLSIZE=1024 # Warns if the downloaded file isn't a log
|
||||
# and is shorter than this value.
|
||||
VERBOSE="" # If not empty, it will display log lines.
|
||||
WAITSECS=5 # On error it will wait this many seconds between
|
||||
# two download attempts.
|
||||
|
||||
# Gets the options (if any).
|
||||
#
|
||||
while getopts ":-:m:M:p:P:s:S:u:U:w:W:" option
|
||||
do
|
||||
case ${option} in
|
||||
"-" )
|
||||
if [ "$OPTARG" = "nolog" ]; then NOLOG="yes"
|
||||
elif [ "$OPTARG" = "info" ]; then VERBOSE="yes"
|
||||
elif [ "$OPTARG" = "verbose" ]; then VERBOSE="yes"
|
||||
elif [ "$OPTARG" = "help" ]; then echo -e "$MSG_USAGE" >&2; exit
|
||||
else echo "$MSG_BADOPT --$OPTARG" >&2; exit 1
|
||||
fi
|
||||
;;
|
||||
"m" | "M" )
|
||||
MAXTRIES="$OPTARG"
|
||||
;;
|
||||
"p" | "P" )
|
||||
MYPASS="$OPTARG"
|
||||
;;
|
||||
"s" | "S" )
|
||||
SMALLSIZE="$OPTARG"
|
||||
;;
|
||||
"u" | "U" )
|
||||
MYUSER="$OPTARG"
|
||||
;;
|
||||
"w" | "W" )
|
||||
WAITSECS="$OPTARG"
|
||||
;;
|
||||
\? )
|
||||
echo "$MSG_BADOPT -$OPTARG" >&2; exit 1
|
||||
;;
|
||||
esac
|
||||
done; shift $((OPTIND -1))
|
||||
#
|
||||
# All provided options were processed.
|
||||
|
||||
# Checks the dependencies.
|
||||
#
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in base64 basename cat date find grep tee wget
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
#
|
||||
# All dependencies are available via "$THECOMMAND" (upper case) call.
|
||||
|
||||
# Sanitizes the options.
|
||||
# Some below are just arbitrary restrictions (reliable source: TODO!).
|
||||
#
|
||||
# MAXTRIES is a non-zero positive integer:
|
||||
[[ -n "$MAXTRIES" ]] && [[ ! "$MAXTRIES" =~ ^[1-9][0-9]*$ ]] \
|
||||
&& echo -e "$MSG_BADPARAM -m $MAXTRIES\n$MSG_USAGE" >&2 && exit 1
|
||||
# MYUSER is empty or not too strange (whatever this means):
|
||||
[[ -n "$MYUSER" ]] && [[ ! "$MYUSER" =~ ^([[:alnum:]]|[.-_\\+])*$ ]] \
|
||||
&& echo -e "$MSG_BADPARAM -u $MYUSER\n$MSG_USAGE" >&2 && exit 1
|
||||
# MYPASS is empty or Base64-encoded:
|
||||
if [ -n "$MYPASS" ]; then
|
||||
[[ ! "$MYPASS" =~ ^[-A-Za-z0-9+/]*={0,3}$ ]] \
|
||||
&& echo -e "$MSG_BADPASS -p $MYPASS\n$MSG_USAGE" >&2 && exit 1
|
||||
# Tries to decode it.
|
||||
echo "$MYPASS" | "$BASE64" --decode >/dev/null 2>&1
|
||||
[[ $? -gt 0 ]] \
|
||||
&& echo -e "$MSG_BADPASS -p $MYPASS\n$MSG_USAGE" >&2 && exit 1
|
||||
MYPASS=$(echo "$MYPASS" | "$BASE64" --decode)
|
||||
fi
|
||||
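# For illustration, a password can be encoded for the -p option like this
# (the password is only a hypothetical example):
#   echo -n 'S3cr3t!' | base64        # prints UzNjcjN0IQ==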
# SMALLSIZE is a non-zero positive integer:
|
||||
[[ -n "$SMALLSIZE" ]] && [[ ! "$SMALLSIZE" =~ ^[1-9][0-9]*$ ]] \
|
||||
&& echo -e "$MSG_BADPARAM -s $SMALLSIZE\n$MSG_USAGE" >&2 && exit 1
|
||||
# WAITSECS is a non-negative integer (can be zero):
|
||||
[[ -n "$WAITSECS" ]] && [[ ! "$WAITSECS" =~ ^[0-9][0-9]*$ ]] \
|
||||
&& echo -e "$MSG_BADPARAM -w $WAITSECS\n$MSG_USAGE" >&2 && exit 1
|
||||
#
|
||||
# We checked the options at least minimally.
|
||||
|
||||
# Formally checks the remote URI provided.
|
||||
#
|
||||
# 1st non-option parameter is the remote URI.
|
||||
if [ -z "$REMOTEURI" -a -n "$1" ]; then REMOTEURI="$1"; shift; fi
|
||||
# It is mandatory.
|
||||
[[ -z "$REMOTEURI" ]] \
|
||||
&& echo -e "$MSG_MISSINGURI\n$MSG_USAGE" >&2 && exit 1
|
||||
# Must be a valid http(s) address.
|
||||
[[ ! "$REMOTEURI" =~ ^https?://([[:alnum:]]|[.-])/?.*$ ]] \
|
||||
&& echo -e "$MSG_BADURI $REMOTEURI" >&2 && exit 1
|
||||
# Adds a trailing slash.
|
||||
REMOTEURI="${REMOTEURI%/}/"
|
||||
#
|
||||
# We checked the remote URI at least minimally.
|
||||
|
||||
# Determines the download directory.
|
||||
#
|
||||
# 2nd non-option parameter is the local folder's pathname.
|
||||
if [ -z "$LOCALDIR" -a -n "$1" ]; then LOCALDIR="$1"; shift; fi
|
||||
# Defaults to the current folder.
|
||||
[[ -z "$LOCALDIR" ]] && LOCALDIR="$PWD"
|
||||
[[ -z "$LOCALDIR" ]] && LOCALDIR="$($(which pwd))"
|
||||
# This should not happen... Gives it up.
|
||||
[[ -z "$LOCALDIR" ]] \
|
||||
&& echo -e "$MSG_MISSINGLOCAL" >&2 && exit 1
|
||||
# Must be a writable folder.
|
||||
if [ ! -d "$LOCALDIR" -o ! -w "$LOCALDIR" ]; then
|
||||
echo -e "$MSG_BADLOCAL $LOCALDIR" >&2; exit 1; fi
|
||||
# Removes the trailing slash (if any).
|
||||
LOCALDIR="${LOCALDIR%/}"
|
||||
#
|
||||
# We've a suitable download directory.
|
||||
|
||||
# Tries to retrieve only newer files from the remote URL.
|
||||
#
|
||||
# Composes the credentials (if any).
|
||||
WGET_CREDENTIALS=""
|
||||
[[ -n "$MYUSER" ]] && WGET_CREDENTIALS="--http-user=$MYUSER --http-password=$MYPASS "
|
||||
#
|
||||
# Figures out how we should do the logging.
|
||||
[[ -n "$NOLOG" ]] \
|
||||
&& LOGFILE="/dev/null" \
|
||||
|| LOGFILE="$LOCALDIR/$("$BASENAME" "$0")_$(eval $LOGSTAMP).log"
|
||||
#
|
||||
# Calls parametrized wget as a worker.
|
||||
if [ -n "$VERBOSE" ]; then
|
||||
# We also need to write to the console.
|
||||
"$WGET" $WGET_OPTIONS $WGET_CREDENTIALS --waitretry=$WAITSECS --tries=$MAXTRIES \
|
||||
--directory-prefix="$LOCALDIR" "$REMOTEURI" \
|
||||
>/dev/null 2> >("$TEE" "$LOGFILE" >&2); excode=$?
|
||||
else
|
||||
# We don't write to the console.
|
||||
"$WGET" $WGET_OPTIONS $WGET_CREDENTIALS --waitretry=$WAITSECS --tries=$MAXTRIES \
|
||||
--directory-prefix="$LOCALDIR" "$REMOTEURI" \
|
||||
>/dev/null 2>"$LOGFILE"; excode=$?
|
||||
fi
|
||||
#
|
||||
# Checks the exit code, warns if non-zero.
|
||||
if [[ excode -ne 0 ]]; then
|
||||
# Displays the log file even if we called it to be quiet.
|
||||
if [ -z "$VERBOSE" -a -z "$NOLOG" ]; then
|
||||
"$CAT" "$LOGFILE" 2>/dev/null
|
||||
fi
|
||||
# Shows/appends the warning.
|
||||
echo -e "\n$MSG_BADEXIT $excode" >&2
|
||||
fi
|
||||
#
|
||||
# We tried to synchronize, we did what we could.
|
||||
|
||||
# Checks the files that are currently being downloaded,
|
||||
# and warns if there are any short files among them.
|
||||
# Files with extensions in SHORTFILEEX and the current
|
||||
# logfile are excluded from this check.
|
||||
#
|
||||
SMALLFILES=$( \
|
||||
"$FIND" "$LOCALDIR" -type f \
|
||||
! -wholename "$LOGFILE" \
|
||||
-newermt "- $NEWFILEMINS minutes" \
|
||||
-size -${SMALLSIZE}c 2>/dev/null | \
|
||||
"$GREP" -iv -e "$SHORTFILEEX"
|
||||
)
|
||||
# Warns if there are small files even if we called it to be quiet.
|
||||
[[ -n "$SMALLFILES" ]] \
|
||||
&& echo -e "\n$MSG_SMALLFILES $SMALLSIZE:\n$SMALLFILES" >&2
|
||||
|
||||
# That's all, Folks! :)
|
233
.utils/downsync/downsync.ps1
Normal file
@ -0,0 +1,233 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Powershell script for one-way (down) synchronization of a remote web folder.
|
||||
|
||||
.DESCRIPTION
|
||||
It does not handle any remote subfolders, only the root folder. Downloads
|
||||
all files that do not exist locally. Updates only an existing file that is
|
||||
older than the remote source. It warns of errors or possible inconsistencies.
|
||||
|
||||
Creates a unique log file in the local folder (this can be disabled).
|
||||
|
||||
Usage: $PSCommandPath -Remote URI_to_be_synced [-Local local_folder]
|
||||
[-User username] [-Pass base64_encoded_password]
|
||||
[-NoLog] [-Info]
|
||||
|
||||
Author: Zoltán KOVÁCS <kovacsz@marcusconsulting.hu>
|
||||
License: GNU/GPL 3+ https://www.gnu.org/licenses/gpl-3.0.html
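
.EXAMPLE
    # A minimal invocation sketch; host, folder and credentials are made-up values:
    .\downsync.ps1 -Remote 'https://www.example.com/export' -Local 'D:\Backups\export' -User 'backup' -Pass 'TXlTZWNyZXQ=' -Info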
|
||||
|
||||
.NOTES
|
||||
Changelog:
|
||||
2025-03-12 v0.1 Initial release.
|
||||
#>
|
||||
|
||||
# Command line parameters.
|
||||
#
|
||||
param (
|
||||
# An http(s) URI pointing to a remote web folder containing the files to synchronize.
|
||||
[Parameter()][string]$Remote,
|
||||
# An existing and writable local folder where the script will download the files.
|
||||
[Parameter()][string]$Local = $PSScriptRoot,
|
||||
# Credentials, if required by the remote website.
|
||||
[Parameter()][string]$User,
|
||||
# A base64-encoded password (if necessary).
|
||||
[Parameter()][string]$Pass,
|
||||
# On error the script will try to download a file at most this many times. Defaults to 3 tries.
|
||||
[Parameter()][int]$MaxTries,
|
||||
# On error the script will wait this many seconds between two download attempts. Defaults to 5 seconds.
|
||||
[Parameter()][int]$WaitRetry,
|
||||
# The script warns if the downloaded file is shorter than this value. Defaults to 1024 bytes.
|
||||
[Parameter()][int]$SmallSize,
|
||||
# If set, the script will not write log file.
|
||||
[Parameter()][switch]$NoLog = $false,
|
||||
# If set, the script will display log lines.
|
||||
[Parameter()][switch]$Info = $false
|
||||
)
|
||||
|
||||
# Initialisations.
|
||||
#
|
||||
if (-not $MaxTries) { $MaxTries = 3 }
|
||||
if (-not $SmallSize) { $SmallSize = 1024 }
|
||||
if (-not $WaitRetry) { $WaitRetry = 5 }
|
||||
|
||||
# Messages.
|
||||
#
|
||||
$Message = @{}
|
||||
$Message['Bad DNS'] = 'Remote host is not an IP and not resolvable.'
|
||||
$Message['Bad folder'] = "The local path must point to a writable folder."
|
||||
$Message['Bad URI'] = 'Remote parameter must be a valid http(s) URI.'
|
||||
$Message['Collision array'] = "The local path is an existing folder:"
|
||||
$Message['Downloaded'] = "Downloaded file:"
|
||||
$Message['Empty filelist'] = "List of files is empty:"
|
||||
$Message['Finished'] = "Synchronisation finished."
|
||||
$Message['Is a folder'] = "Remote subfolders are ignored:"
|
||||
$Message['Local newer'] = "The files are different but the local one is newer:"
|
||||
$Message['Size mismatch'] = "Size of the downloaded file differ:"
|
||||
$Message['Started'] = "Synchronisation started."
|
||||
$Message['Unable fetchdir'] = "Unable to fetch the content of the remote folder."
|
||||
$Message['Unable to decode'] = 'Password must be properly base64 encoded.'
|
||||
$Message['Unable to stat remote'] = 'Unable to stat the remote object:'
$Message['Unable to download'] = 'Unable to download the remote file:'
$Message['Small file'] = "File is smaller than " + $SmallSize + " bytes:"
|
||||
|
||||
$Message['Usage'] = "Usage:`n" + `
|
||||
$PSCommandPath + ' -Remote URI_to_be_synced [-Local local_folder] ' + `
|
||||
'[-User username] [-Pass base64_encoded_password] ' + `
|
||||
'[-NoLog True] [-Info True]'
|
||||
|
||||
# Logger function.
|
||||
#
|
||||
function Write-Log {
|
||||
$date = Get-Date -Format "yyyy-MM-dd HH:mm:ss.fff"
|
||||
if ( -not($NoLog)) { Add-Content -Path $LogFilePath -Value "$date $args" }
|
||||
if ($Info) { Write-Host $args }
|
||||
}
|
||||
|
||||
# Checks the -Remote parameter.
|
||||
#
|
||||
# It is mandatory.
|
||||
if ( -not("$Remote")) { Write-Host $Message['Usage']; exit 1 }
|
||||
# The closing / is necessary.
|
||||
$Remote = $Remote.TrimEnd('/') + '/'
|
||||
# Must be well-formed and http(s).
|
||||
if ( -not([uri]::IsWellFormedUriString("$Remote", 'Absolute')) -or -not(([uri] "$Remote").Scheme -in 'http', 'https')) {
|
||||
Write-Host $Message['Bad URI']; exit 1 }
|
||||
# Must be IPv4 or resolvable.
|
||||
if ( -not(([uri]"$Remote").Host -match "^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$" -and [bool](([uri]"$Remote").Host -as [ipaddress]))) {
|
||||
# Is it resolvable?
|
||||
try { Resolve-DnsName -Name ([uri]"$Remote").Host -ErrorAction Stop | Out-Null }
|
||||
catch { Write-Host $Message['Bad DNS']; exit 1 }
|
||||
}
|
||||
#
|
||||
# We've a somewhat checked remote.
|
||||
|
||||
# Checks the -Local parameter.
|
||||
#
|
||||
# Must be an existing, writable folder.
|
||||
if ( -not("$Local")) { $Local = $PSScriptRoot }
|
||||
if ( -not(Test-Path -LiteralPath "$Local" -pathType Container)) { Write-Host $Message['Bad folder']; exit 1 }
|
||||
# Can we write into?
|
||||
try {
|
||||
$testfile = $Local + '\' + [guid]::NewGuid() + '.tmp'
|
||||
[io.file]::OpenWrite("$testfile").close()
|
||||
Remove-Item -ErrorAction SilentlyContinue "$testfile" }
|
||||
catch { Write-Host $Message['Bad folder']; exit 1 }
|
||||
#
|
||||
# We've a somewhat checked local folder.
|
||||
|
||||
# Decodes the provided -Pass (if any).
|
||||
#
|
||||
if ("$Pass") {
|
||||
try { $Pass = ([System.Text.Encoding]::ASCII.GetString([System.Convert]::FromBase64String($Pass))) }
|
||||
catch { Write-Host $Message['Unable to decode']; exit 1 }
|
||||
}
|
||||
#
|
||||
# We've a decoded (or empty) password.
|
||||
|
||||
# Initializes the log file.
|
||||
#
|
||||
$LogFilePath = $Local + '\' + (Get-Item $PSCommandPath ).Basename + (Get-Date -Format "-yyyyMMdd-HHmmss") +'.log'
|
||||
Write-Log $Message['Started']
|
||||
#
|
||||
# We've the log file ready to use.
|
||||
|
||||
# Prepares the Authorization header from provided credentials (if any).
|
||||
#
|
||||
$Headers = ''
|
||||
if ("$User" ) {
|
||||
$encoded = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("$($User):$($Pass)"))
|
||||
$Headers = @{ Authorization = "Basic $encoded" }
|
||||
}
|
||||
# We've an Authorization header ready to use for Webrequests.
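# (Sketch: for the made-up pair user/pass the header value would be
#  "Basic dXNlcjpwYXNz", i.e. the Base64 form of "user:pass".)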
|
||||
|
||||
# Let's get the directory index from the remote source.
|
||||
#
|
||||
$response = ''
|
||||
try {
|
||||
$ProgressPreference = 'SilentlyContinue'
|
||||
if ("$Headers") {$response = (Invoke-WebRequest -Uri "$Remote" -Headers $Headers -UseBasicParsing) }
|
||||
else {$response = (Invoke-WebRequest -Uri "$Remote" -UseBasicParsing ) }
|
||||
}
|
||||
catch { Write-Log $Message['Unable fetchdir'] "$Remote" $_.Exception.Response.StatusCode.Value__ $_.Exception.Response.StatusDescription; exit 1 }
|
||||
$files = @($response.Links.HREF | select -skip 1)
|
||||
#
|
||||
# We send a warning if it is empty.
|
||||
#
|
||||
if ($files.Count -eq 0) { Write-Log $Message['Empty filelist'] "$Remote" }
|
||||
#
|
||||
# We've the list of remote files.
|
||||
|
||||
# Processes the remote files in a row, one after the other.
|
||||
#
|
||||
foreach ($file in $files) {
|
||||
#
|
||||
# Let's get the parameters of the remote object. On error we send a warning and move on.
|
||||
#
|
||||
$remoteHeaders = ''
|
||||
try {
|
||||
$ProgressPreference = 'SilentlyContinue'
|
||||
if ("$Headers") { $remoteHeaders = (Invoke-WebRequest -Uri ("$Remote" + "$file") -Headers $Headers -Method Head -UseBasicParsing ).Headers }
|
||||
else { $remoteHeaders = (Invoke-WebRequest -Uri ("$Remote" + "$file") -Method Head -UseBasicParsing).Headers }
|
||||
}
|
||||
catch { Write-Log $Message['Unable to stat remote'] ("$Remote" + "$file") $_.Exception.Message; continue }
|
||||
$remoteDate = $remoteHeaders['Last-Modified']
|
||||
$remoteSize = $remoteHeaders['Content-Length']
|
||||
$remoteType = $remoteHeaders['Content-Type']
|
||||
#
|
||||
# If the remote object is a folder we send a warning and move on.
|
||||
#
|
||||
if ("$remoteType" -eq 'text/directory') { Write-Log $Message['Is a folder'] ("$Remote" + "$file"); continue }
|
||||
#
|
||||
# If we've a local object and it is a folder we send a warning and move on.
|
||||
#
|
||||
if (Test-Path -LiteralPath "$Local\$file" -PathType Container) { Write-Log $Message['Collision array'] "$Local\$file"; continue }
|
||||
#
|
||||
# We've an existing local file?
|
||||
#
|
||||
if (Test-Path -LiteralPath "$Local\$file" -PathType Leaf) {
|
||||
$localDate = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).LastWriteTime.DateTime
|
||||
$localSize = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).Length
|
||||
#
|
||||
# If the local file is newer than remote we don't replace it, but we send a warning if the sizes are different.
|
||||
#
|
||||
if ((Get-Date $localDate) -gt (Get-Date $remoteDate)) {
|
||||
if ( $localSize -ne $remoteSize ) { Write-Log $Message['Local newer'] $file }
|
||||
continue
|
||||
}
|
||||
}
|
||||
#
|
||||
# OK, we decided to download the remote file.
|
||||
# On failure, we'll try again a few times.
|
||||
#
|
||||
for ($i = 1; $i -le $MaxTries; $i++) {
|
||||
try {
|
||||
$ProgressPreference = 'SilentlyContinue'
|
||||
if ("$Headers") { Invoke-WebRequest -Uri ("$Remote" + "$file") -Headers $Headers -OutFile ($Local + '\' + $file) }
|
||||
else { Invoke-WebRequest -Uri ("$Remote" + "$file") -OutFile ($Local + '\' + $file) }
|
||||
#
|
||||
Write-Log $Message['Downloaded'] ("$Remote" + "$file")
|
||||
#
|
||||
# Checks the size of the downloaded file, stops trying if it is OK.
|
||||
#
|
||||
$localSize = (Get-Item -LiteralPath ("$Local" + '\' + "$file")).Length
|
||||
if ( $localSize -eq $remoteSize ) {
|
||||
#
|
||||
# We send a warning on small files (except the logs).
|
||||
#
|
||||
if ($localSize -lt $SmallSize -and (Get-Item ("$Local" + "\" + "$file")).Extension -notin ('.log')) {
|
||||
Write-Log $Message['Small file'] ("$Local" + "\" + "$file") }
|
||||
break
|
||||
}
|
||||
#
|
||||
Write-Log $Message['Size mismatch'] $Local\$file $localSize $remoteSize
|
||||
}
|
||||
catch { Write-Log $Message['Unable to download'] ("$Remote" + "$file") $_.Exception.Message }
|
||||
#
|
||||
# Waits before retrying.
|
||||
#
|
||||
Start-Sleep -Seconds $WaitRetry
|
||||
}
|
||||
}
|
||||
#
|
||||
# That's all.
|
||||
#
|
||||
Write-Log $Message['Finished']
|
17
.utils/downsync/getbackups
Normal file
@ -0,0 +1,17 @@
#!/bin/bash
#
# This script retrieves daily backups from our remote web services.
# Contains (encoded) passwords, keep it confidential!
# Maintained by hand.

# A download step
$HOME/bin/downsync \
  -u <https user> -p "<base64-encoded https password>" \
  'https://<remote website>/export' \
  '<download destination pathname>'

# More download steps (if any)

# Rotates all backup folders.
# The actual schedule is defined per folder in the .rotate_folder.conf files.
$HOME/bin/rotatebackups "<downloads destination root folder>"
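
# A possible cron entry for a nightly run (time and paths are just an example):
#   30 3 * * *  $HOME/bin/getbackups >"$HOME/getbackups.cron.log" 2>&1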
47
.utils/downsync/rotatebackups
Executable file
@ -0,0 +1,47 @@
#!/bin/bash
#
# Backup folder maintenance operation, planned to run once a day.
# This script is usually called by cron (but indirectly).
# Uses the rotate_folder utility which must be available on path.
#
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
# 2025-05-20 v0.1 Initial release

# Will maintain child subfolders of this directory.
# Input parameter (if any) will be sanitized later.
[[ -n "$1" ]] \
&& BACKUPSROOT="$1" \
|| BACKUPSROOT="$PWD"

# Checks the components.
[[ -z "$(which dirname)" ]] && exit 1
[[ -z "$(which readlink)" ]] && exit 1
[[ -z "$(which xargs)" ]] && exit 1

# Where am I?
SCRPATH="$( cd -P "$( "$(which dirname)" "$0" )" && echo "$PWD" )"

# Rotates the backup folders.
#
# Enumerates the folders and tries to rotate their content.
for folder in $(ls -1 "$BACKUPSROOT" 2>/dev/null | $(which xargs) -0 ) ""
do
  if [ -n "$folder" ]; then
    # Dereferenced absolute path.
    folder="$("$(which readlink)" -e "$BACKUPSROOT/$folder")" #"
    # Is it a folder with a prepared configuration?
    if [ -d "$folder" -a -r "$folder/.rotate_folder.conf" ]; then
      # Does the rotate job.
      if [ -x "$SCRPATH/rotate_folder" ]; then
        "$SCRPATH/rotate_folder" -f "$folder" >/dev/null
      elif [ -x "$(which rotate_folder)" ]; then
        "$(which rotate_folder)" -f "$folder" >/dev/null
      fi
    fi
  fi
done
#
# Done with rotating.

# That's all, Folks :)
1
configs/.gitignore
vendored
@ -1,4 +1,5 @@
# Ignore everything else in this directory.
*
!certs
!xport_backup
!.gitignore
3
configs/xport_backup
Normal file
@ -0,0 +1,3 @@
# Credentials file for exported backups feature.
# Needs username:apr1-hashed password entries, one per line.
# Use https://htpasswd.utils.com/ or a similar tool to fill it in.
5
storage/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
# Ignore everything in this directory except these folders.
*
!.gitignore
!backups
!volumes
5
storage/backups/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
# Ignore everything in this directory except these files.
*
!.gitignore
!export
!tarballs
4
storage/backups/export/.gitignore
vendored
Normal file
@ -0,0 +1,4 @@
# Ignore everything in this directory except these files.
*
!.gitignore
!.rotate_folder.conf
9
storage/backups/export/.rotate_folder.conf
Normal file
@ -0,0 +1,9 @@
# This is a shell script excerpt for configuration purposes only.
# Handle with care! Please don't put code here, only variables.

CLASSES_PATTERN="^([^.]*)\..*\.$HOSTNAME\.(dmp|sql\.gz|tgz|log)$"
DOIT="yes"          # if empty the script makes a dry run
RETAIN_DAYS=7       # retains all files created within that many days
RETAIN_WEEKS=0      # retains one file per week/month,
RETAIN_MONTHS=0     # created within that many weeks/months
109
tools/backup.d/xport_backup.sh
Normal file
@ -0,0 +1,109 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Optional additional backup operation, intended to export an (almost)
# up-to-date downloadable copy of the data we handle for our customers.
# The script synchronizes some of the existing backup
# files to an export folder that can be downloaded from the web.
|
||||
#
|
||||
# Uses the rotate_folder tool to select files to synchronize.
|
||||
# This tool must be somewhere in the path.
|
||||
#
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-05-26 v0.3
|
||||
# mod: The .gz extension has been added to the CLASSES_PATTERN.
|
||||
# 2025-04-02 v0.2
|
||||
# fix: Omits all warnings about missing source folder(s).
|
||||
# 2025-03-06 v0.1 Initial release
|
||||
|
||||
# Accepted environment variables and their defaults.
|
||||
PAR_BASEDIR=${PAR_BASEDIR:-""} # Service's base folder
|
||||
PAR_DUMPDIR=${PAR_DUMPDIR:-""} # Absolute path to DB dumps
|
||||
PAR_EXPORTDIR=${PAR_EXPORTDIR:-""} # Absolute path to export dir
|
||||
PAR_RETAINDAYS=${PAR_RETAINDAYS:-"1"} # Days to retain the copies
|
||||
PAR_TARBALLDIR=${PAR_TARBALLDIR:-""} # Absolute path to tgz dumps
|
||||
|
||||
# Other initialisations.
|
||||
CLASSES_PATTERN="^([^.]*)\..*\.$HOSTNAME\.(dmp|sql\.gz|gz|tgz|log)$"
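# (E.g. on a host named "myhost" a dump called "mywiki.20250601-0300.myhost.sql.gz"
#  would match the pattern above - the file name is only an illustration.)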
|
||||
DUMPPATH="storage/backups/dumps" # Default path to DB dumps
|
||||
EXPORTPATH="storage/backups/export" # Default path to export dir
|
||||
TARBALLPATH="storage/backups/tarballs" # Default path to tgz dumps
|
||||
USER=${USER:-$LOGNAME}                 # Fix for cron environment only
|
||||
YMLFILE="docker-compose.yml"
|
||||
|
||||
# Messages.
|
||||
MSG_MISSINGDEP="Fatal: missing dependency"
|
||||
MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
MSG_NONWRITE="The target directory isn't writable"
|
||||
|
||||
# Checks the dependencies.
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in cp cut date dirname grep hostname readlink rotate_folder tar
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
else echo "$MSG_MISSINGDEP $item." >&2; exit 1; fi
|
||||
done
|
||||
|
||||
# Where am I?
|
||||
# https://gist.github.com/TheMengzor/968e5ea87e99d9c41782
|
||||
SOURCE="$0"
|
||||
while [ -h "$SOURCE" ]; do
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && pwd )" #"
|
||||
SOURCE="$("$READLINK" "$SOURCE")"
|
||||
# if $SOURCE was a relative symlink, we need to resolve it
|
||||
# relative to the path where the symlink file was located
|
||||
[[ $SOURCE != /* ]] && SOURCE="$SCRPATH/$SOURCE"
|
||||
done; SCRPATH="$( cd -P "$("$DIRNAME" "$SOURCE" )" && pwd )" #"
|
||||
|
||||
# Searches the base folder, containing a docker-compose.yml file.
|
||||
# Called from the base folder (./)?
|
||||
BASE_DIR="$PAR_BASEDIR"
|
||||
TEST_DIR="$SCRPATH"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# Called from ./tools/*.d?
|
||||
TEST_DIR="$("$DIRNAME" "$TEST_DIR")"
|
||||
[[ -z "$BASE_DIR" ]] && [[ -r "$TEST_DIR/$YMLFILE" ]] && BASE_DIR="$TEST_DIR"
|
||||
# On failure gives it up here.
|
||||
if [ -z "$BASE_DIR" -o ! -r "$BASE_DIR/$YMLFILE" ]; then
|
||||
echo "$MSG_MISSINGYML" >&2; exit 1
|
||||
fi
|
||||
# Sets the absolute paths.
|
||||
DUMPDIR="${PAR_DUMPDIR:-$BASE_DIR/$DUMPPATH}"
|
||||
EXPORTDIR="${PAR_EXPORTDIR:-$BASE_DIR/$EXPORTPATH}"
|
||||
TARBALLDIR="${PAR_TARBALLDIR:-$BASE_DIR/$TARBALLPATH}"
|
||||
|
||||
# Exits silently if EXPORTDIR isn't present.
|
||||
[[ ! -e "$EXPORTDIR" ]] && exit 0
|
||||
# EXPORTDIR must be writable.
|
||||
[[ ! -w "$EXPORTDIR" ]] \
|
||||
&& echo "$MSG_NONWRITE: $EXPORTDIR" >&2 && exit 1
|
||||
|
||||
# Let's select and copy the appropriate backup files.
|
||||
#
|
||||
# We'll call rotate_folder (dry run) with CLASSES_PATTERN and PAR_RETAINDAYS
|
||||
# set above to select relevant files created in the backup folders within last
|
||||
# PAR_RETAINDAYS days. These files are synchronized with the cp -u statement.
|
||||
#
|
||||
# Enumerates the folders.
|
||||
for folder in "$DUMPDIR" "$TARBALLDIR"
|
||||
do
|
||||
# Selects the appropriate files (which have the "DR" - daily retain - tag).
|
||||
for filename in $( (export CLASSES_PATTERN="$CLASSES_PATTERN" \
|
||||
RETAIN_DAYS="$PAR_RETAINDAYS" RETAIN_WEEKS=0 RETAIN_MONTHS=0; \
|
||||
"$ROTATE_FOLDER" --noconf -f "$folder" 2>/dev/null) | \
|
||||
"$GREP" '^DR ' | "$CUT" -d' ' -f2) ""
|
||||
do
|
||||
# Updates the current file.
|
||||
if [ -n "$filename" ]; then
|
||||
"$CP" -u "$folder/$filename" "$EXPORTDIR/" 2>/dev/null
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
# That's all, Folks! :)
|
@ -8,6 +8,8 @@
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-03-21 v1.2
|
||||
# fix: wrong output redirection order in docker compose.
|
||||
# 2024-08-24 v1.1
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# 2023-06-18 v1.0
|
||||
@ -39,7 +41,7 @@ done
|
||||
#
|
||||
# Let's find which version of docker-compose is installed.
|
||||
commandstring=""
|
||||
if [ $($DOCKER compose version 2>&1 >/dev/null; echo $?) -eq 0 ]; then
|
||||
if [ $($DOCKER compose version >/dev/null 2>&1; echo $?) -eq 0 ]; then
|
||||
# We'll use v2 if it is available.
|
||||
DOCKER_COMPOSE="$DOCKER"
|
||||
commandstring="compose"
|
||||
|
@ -1,11 +1,11 @@
|
||||
#!/bin/bash
|
||||
|
||||
PAR_SERVICENAME=
|
||||
PAR_SERVICENAME=""
|
||||
PAR_PROXYHOST="localhost"
|
||||
PAR_PROXYPORT="8201"
|
||||
PAR_SERVERNAME="myservice.example.com"
|
||||
PAR_LOCATION=
|
||||
PAR_WEBMASTER="webmaster@example.com" # Valid support email address
|
||||
PAR_WEBMASTER="webmaster@example.com" # Valid support email address
|
||||
|
||||
################################################################################
|
||||
# There is nothing to change below this line (I hope)
|
||||
@ -18,8 +18,13 @@ PAR_SERVICE="$HOME/services/$PAR_SERVICENAME"
|
||||
PARAMETERS='$PAR_ACMEHOST:$PAR_ACMEPORT:$PAR_SERVICE:$PAR_PROXYHOST:$PAR_PROXYPORT:$PAR_SERVERNAME:$PAR_LOCATION:$PAR_WEBMASTER'
|
||||
for parameter in $(echo "$PARAMETERS" | tr ":" "\n")
|
||||
do export ${parameter:1}; done
|
||||
cat "$PAR_SERVICE/.templates/nginx/nginx.conf" | envsubst "$PARAMETERS" \
|
||||
> "$PAR_SERVICE/configs/nginx.conf"
|
||||
for template in $(cd "$PAR_SERVICE/.templates/nginx/"; ls -1 nginx*) ""
|
||||
do
|
||||
if [ -n "$template" ]; then
|
||||
cat "$PAR_SERVICE/.templates/nginx/$template" | envsubst "$PARAMETERS" \
|
||||
> "$PAR_SERVICE/configs/$template"
|
||||
fi
|
||||
done
|
||||
|
||||
touch $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log
|
||||
chmod 660 $PAR_SERVICE/logs/web/access.log $PAR_SERVICE/logs/web/error.log
|
||||
|
@ -10,6 +10,8 @@
|
||||
# Author: Kovács Zoltán <kovacs.zoltan@smartfront.hu>
|
||||
# Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
# 2025-03-17 v1.2
|
||||
# mod: rotates non-empty logs even if the service isn't running.
|
||||
# 2024-08-24 v1.1
|
||||
# new: docker-compose v2 compatibility - tested with Ubuntu 24.04 LTS.
|
||||
# 2023-06-18 v1.0
|
||||
@ -98,10 +100,6 @@ CONFFILE="$CONFDIR/.${SCRFILE%.*}.conf"
|
||||
STATEFILE="$CONFDIR/.${SCRFILE%.*}.state"
|
||||
LOGDIR="${PAR_LOGDIR:-$BASE_DIR/$LOGDIR}"
|
||||
|
||||
# Doesn't rotate logs for stopped services.
|
||||
[[ -z "$(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps --services --filter "status=running")" ]] \
|
||||
&& exit 0
|
||||
|
||||
# Locates the worker script.
|
||||
WORKERSCRIPT="$SCRPATH/copytruncate"
|
||||
[[ ! -x "$WORKERSCRIPT" ]] && WORKERSCRIPT="$(which copytruncate)"
|
||||
@ -116,6 +114,7 @@ $LOGDIR/*.log {
|
||||
missingok
|
||||
daily
|
||||
rotate 30
|
||||
notifempty
|
||||
# Must be consistent with prerotate script's settings!
|
||||
dateext
|
||||
dateyesterday
|
||||
@ -143,6 +142,7 @@ $LOGDIR/web/*.log {
|
||||
missingok
|
||||
daily
|
||||
rotate 60
|
||||
notifempty
|
||||
# Must be consistent with prerotate script's settings!
|
||||
dateext
|
||||
dateyesterday
|
||||
|
@ -6,7 +6,11 @@
|
||||
# Author: Kovács Zoltán <kovacsz@marcusconsulting.hu>
|
||||
# License: GNU/GPL v3+ (https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
#
|
||||
# 2025-02-26 v0.1
|
||||
# 2025-06-01 v0.4
|
||||
# fix: better continuation of existing log file (even if empty).
|
||||
# 2025-03-29 v0.3
|
||||
# mod: no longer repeats existing log lines.
|
||||
# 2025-02-26 v0.2
|
||||
# fix: a silly typo (commandtring) blocked the startup.
|
||||
# 2025-02-02 v0.1 Initial release
|
||||
|
||||
@ -27,7 +31,7 @@ MSG_MISSINGYML="Fatal: didn't find the docker-compose.yml file"
|
||||
# Checks the dependencies.
|
||||
TR=$(which tr 2>/dev/null)
|
||||
if [ -z "$TR" ]; then echo "$MSG_MISSINGDEP tr."; exit 1 ; fi
|
||||
for item in cut dirname docker grep ps readlink
|
||||
for item in cut date dirname docker grep ps readlink tail
|
||||
do
|
||||
if [ -n "$(which $item)" ]
|
||||
then export $(echo $item | "$TR" '[:lower:]' '[:upper:]' | "$TR" '-' '_')=$(which $item)
|
||||
@ -93,9 +97,21 @@ else
|
||||
if [ -n "$service" ]; then
|
||||
# Converts the service's name to an actual running container's name.
|
||||
container="$("$DOCKER" inspect -f '{{.Name}}' $(cd "$BASE_DIR"; "$DOCKER_COMPOSE" $commandstring ps -q "$service") | "$CUT" -c2-)"
|
||||
# It will not start a new log if it already exists.
|
||||
# It will not start the log process if it already exists.
|
||||
if [ -z "$("$PS" auxww | "$GREP" "$DOCKER" | "$GREP" 'logs' | "$GREP" "$container" )" ]; then
|
||||
"$DOCKER" logs -t -f $container >> "$BASE_DIR/$LOGDIR/$service.log" 2>&1 &
|
||||
# Determines the last existing log line in the log file (if any).
|
||||
logline="$("$TAIL" -n1 "$BASE_DIR/$LOGDIR/$service.log" 2>/dev/null)"
|
||||
# Gets the timestamp from this line.
|
||||
[[ -n "$logline" ]] \
|
||||
&& timestamp="$(echo "$logline" | "$CUT" -d' ' -f1 2>/dev/null)" \
|
||||
|| timestamp="invalid"
|
||||
# If the log does not contain a valid last timestamp, we write log lines
|
||||
# created since the last time the container was started.
|
||||
[[ $("$DATE" -d "$timestamp" >/dev/null 2>&1; echo $?) -eq 0 ]] \
|
||||
&& since="$timestamp" \
|
||||
|| since="$("$DOCKER" inspect -f '{{ .State.StartedAt }}' $container)"
|
||||
# Only logs the new lines (actually repeats the last one - TODO!).
|
||||
"$DOCKER" logs -t --since "$since" -f $container >> "$BASE_DIR/$LOGDIR/$service.log" 2>&1 &
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
Block a user