diff --git a/.gitea/workflows/build.yml b/.gitea/workflows/build.yml index 7fad899..ba93475 100644 --- a/.gitea/workflows/build.yml +++ b/.gitea/workflows/build.yml @@ -4,11 +4,12 @@ on: push: branches: - main + - develop pull_request: jobs: build: - runs-on: prodesk + runs-on: prodesk steps: - name: Checkout repository uses: actions/checkout@v4 @@ -30,6 +31,5 @@ jobs: file: ./Dockerfile push: true tags: | - gitea.calahilstudios.com/${{ github.repository_owner }}/${{ github.event.repository.name }}:latest + gitea.calahilstudios.com/${{ github.repository_owner }}/${{ github.event.repository.name }}:develop gitea.calahilstudios.com/${{ github.repository_owner }}/${{ github.event.repository.name }}:${{ github.sha }} - diff --git a/.gitignore b/.gitignore index e69de29..4c49bd7 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1 @@ +.env diff --git a/Dockerfile b/Dockerfile index 1113b46..051862a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,37 +1,50 @@ -FROM ghcr.io/linuxserver/duplicati:2.1.0 +# Use LinuxServer.io Duplicati base +FROM linuxserver/duplicati:2.1.0 -ENV DEBIAN_FRONTEND=noninteractive -SHELL ["/bin/bash", "-o", "pipefail", "-c"] - -RUN apt-get update -y \ +# Install Docker CLI, bash, python3, btrfs support and all the app directories +RUN apt-get update \ && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ gnupg \ lsb-release \ - btrfs-progs \ - #&& rm -rf /var/lib/apt/lists/* \ - && install -m 0755 -d /etc/apt/keyrings \ - && curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc \ - && chmod a+r /etc/apt/keyrings/docker.asc \ - && echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ - $(. 
/etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \ - tee /etc/apt/sources.list.d/docker.list > /dev/null \ - && apt-get update -y \ - && apt-get install -y --no-install-recommends \ - cron \ bash \ + python3 \ + python3-pip \ + btrfs-progs \ + && mkdir -p /etc/apt/keyrings \ + && curl -fsSL "https://download.docker.com/linux/$(. /etc/os-release; echo "$ID")/gpg" \ + | gpg --dearmor -o /etc/apt/keyrings/docker.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] \ + https://download.docker.com/linux/$(. /etc/os-release; echo "$ID") \ + $(lsb_release -cs) stable" \ + | tee /etc/apt/sources.list.d/docker.list > /dev/null \ + && apt-get update \ + && apt-get install -y --no-install-recommends \ docker-ce-cli \ - postgresql-client \ + && groupadd -f docker \ + && usermod -aG docker abc \ && rm -rf /var/lib/apt/lists/* \ - && mkdir -p /backups + && mkdir -p /usr/local/bin /config /etc/services.d/backupbot -# Copy backup script +# Copy the backup script COPY backup.sh /usr/local/bin/backup.sh -RUN chmod +x /usr/local/bin/backup.sh \ - && mkdir -p /etc/services.d/backupbot +RUN chmod +x /usr/local/bin/backup.sh + +# Copy the environment variables for backupbot +COPY backupbot.conf /defaults/backupbot.conf +RUN chown www-data:www-data /defaults/backupbot.conf \ + && chmod 644 /defaults/backupbot.conf + +# Copy s6 service for backupbot COPY services/backupbot/run /etc/services.d/backupbot/run RUN chmod +x /etc/services.d/backupbot/run +# Copy web frontend +COPY web /app +RUN chmod +x /app/cgi-bin/backupbot.cgi +# Expose web frontend port +EXPOSE 8080 +# Keep duplicati entrypoint +ENTRYPOINT ["/init"] diff --git a/backup.sh b/backup.sh index 9ecaaa2..d5baa34 100644 --- a/backup.sh +++ b/backup.sh @@ -4,7 +4,6 @@ # Author: Calahil Studios # === CONFIGURATION === -LOG_FILE="$1" BACKUP_DIR="/backups/postgres_dumps" RETENTION_DAYS="${RETENTION_DAYS:-7}" # Keep 7 days of backups @@ -19,12 +18,12 @@ 
ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0 EOF ) -echo "[BACKUPBOT_INFO] Starting PostgreSQL backup service..." | tee -a "$LOG_FILE" +echo "[BACKUPBOT_INFO] Starting PostgreSQL backup service..." mkdir -p "$BACKUP_DIR" TIMESTAMP=$(date +'%Y-%m-%d_%H-%M-%S') -echo "[BACKUPBOT_INFO] $(date) - Starting backup cycle ($TIMESTAMP)" | tee -a "$LOG_FILE" -echo "[BACKUPBOT_INFO] Checking for running Postgres containers..." | tee -a "$LOG_FILE" +echo "[BACKUPBOT_INFO] $(date) - Starting backup cycle ($TIMESTAMP)" +echo "[BACKUPBOT_INFO] Checking for running Postgres containers..." # Find running containers matching known image names MATCHING_CONTAINERS=$( @@ -41,7 +40,7 @@ MATCHING_CONTAINERS=$( ) if [ -z "$MATCHING_CONTAINERS" ]; then - echo "[BACKUPBOT_WARN] No Postgres containers found." | tee -a "$LOG_FILE" + echo "[BACKUPBOT_WARN] No Postgres containers found." else for container in $MATCHING_CONTAINERS; do NAME=$(docker inspect --format '{{.Name}}' "$container" | sed 's#^/##') @@ -54,16 +53,16 @@ else PG_USER=$(docker inspect --format '{{range .Config.Env}}{{println .}}{{end}}' "$container" | grep POSTGRES_USER | cut -d= -f2) PG_PASS=$(docker inspect --format '{{range .Config.Env}}{{println .}}{{end}}' "$container" | grep POSTGRES_PASSWORD | cut -d= -f2) if docker exec -e PGPASSWORD="$PG_PASS" "$container" pg_dumpall -U "$PG_USER" -h 127.0.0.1 >"$FILE" 2>/tmp/pg_backup_error.log; then - echo "[BACKUPBOT_SUCCESS] Backup complete for $NAME -> $FILE" | tee -a "$LOG_FILE" + echo "[BACKUPBOT_SUCCESS] Backup complete for $NAME -> $FILE" else - echo "[BACKUPBOT_ERROR] Backup failed for $NAME (check /tmp/pg_backup_error.log)" | tee -a "$LOG_FILE" + echo "[BACKUPBOT_ERROR] Backup failed for $NAME (check /tmp/pg_backup_error.log)" fi # Retention cleanup find "$CONTAINER_BACKUP_DIR" -type f -mtime +$RETENTION_DAYS -name '*.sql' -delete done fi -echo "[BACKUPBOT_INFO] Creating a snapshot of /srv/appdata" | tee -a "$LOG_FILE" +echo "[BACKUPBOT_INFO] Creating a 
snapshot of /srv/appdata" btrfs subvolume snapshot -r /source/appdata /backups/snapshots/$(hostname)-$(date +%F) -echo "[BACKUPBOT_INFO] Backup cycle complete." | tee -a "$LOG_FILE" +echo "[BACKUPBOT_INFO] Backup cycle complete." diff --git a/backupbot.conf b/backupbot.conf new file mode 100644 index 0000000..483d3c2 --- /dev/null +++ b/backupbot.conf @@ -0,0 +1,9 @@ +TZ=America/Los_Angeles +BACKUP_DIR=/backups/postgres +LOG_FILE=/config/log/pgbackup.log +MAX_RETRIES=3 +GOTIFY_URL=http://gotify.example.com +GOTIFY_TOKEN=your_gotify_token_here +BACKUP_HOUR=03 +BACKUP_MINUTE=00 +BACKUPBOT_WEB_LOGGING=DEBUG diff --git a/docker-compose.yml b/docker-compose.yml index f5a30ca..a1cd073 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,11 +1,11 @@ services: backupbot: - image: gitea.calahilstudios.com/calahil/backupbot:latest + build: . container_name: backupbot privileged: true environment: - - PUID=0 - - PGID=0 + - PUID=1000 + - PGID=1000 - TZ=Etc/UTC - SETTINGS_ENCRYPTION_KEY=${KEY} - CLI_ARGS= #optional @@ -14,12 +14,11 @@ services: # Config dir for duplicati - /srv/appdata/duplicati/config:/config # Backup folder to store dumps/backups - - /srv/backups:/backups - # Local docker config dirs - - /srv/appdata:/source/appdata:rshared + - /srv/backups:/backups:rshared # Docker socket to list containers - /var/run/docker.sock:/var/run/docker.sock:ro ports: - 8200:8200 + - 8201:8080 restart: unless-stopped diff --git a/services/backupbot/run b/services/backupbot/run index 80a421c..6d82a03 100644 --- a/services/backupbot/run +++ b/services/backupbot/run @@ -1,27 +1,79 @@ #!/usr/bin/with-contenv bash set -e +# Source env if available +if [[ -f /config/backupbot.conf ]]; then + set -a + source /config/backupbot.conf + set +a +else + echo "[INFO] copying config vars from defaults..." + cp -r /defaults/backupbot.conf /config/ + set -a + source /config/backupbot.conf + set +a +fi -echo "[BACKUPBOT_INFO] Starting PostgreSQL backup loop service..." 
+# Start Python HTTP server for web config in background +cd /app -INTERVAL_HOURS="${INTERVAL_HOURS:-24}" +nohup python3 -m http.server 8080 --cgi 2>&1 & + +# Start backup scheduler STATE_FILE="/config/last_backup_date" -LOG_FILE="/config/log/pgbackup.log" -mkdir -p "$(dirname "$STATE_FILE")" "$(dirname "$LOG_FILE")" +# TZ +: "${TZ:=UTC}" +export TZ +# Retry config +RETRIES=3 +GOTIFY_URL="${GOTIFY_URL:-}" +GOTIFY_TOKEN="${GOTIFY_TOKEN:-}" + +# Helper: seconds until next 3AM +seconds_until_next_3am() { + local now next_3am + now=$(date +%s) + next_3am=$(date -d "today 03:00" +%s) + ((now >= next_3am)) && next_3am=$(date -d "tomorrow 03:00" +%s) + echo $((next_3am - now)) +} + +# Run backup with retries +run_backup() { + local attempt=1 + while ((attempt <= RETRIES)); do + echo "[INFO] Backup attempt $attempt" + if /usr/local/bin/backup.sh; then + echo "[SUCCESS] Backup completed" + return 0 + else + echo "[WARN] Backup failed on attempt $attempt" + ((attempt++)) + sleep 5 + fi + done + # Send Gotify notification if configured + if [[ -n "$GOTIFY_URL" && -n "$GOTIFY_TOKEN" ]]; then + curl -s -X POST "$GOTIFY_URL/message?token=$GOTIFY_TOKEN" \ + -F "title=Backup Failed" \ + -F "message=PostgreSQL backup failed after $RETRIES attempts" \ + -F "priority=5" + fi + return 1 +} + +# Main loop while true; do TODAY=$(date +%F) - - # Check if a backup already ran today if [[ -f "$STATE_FILE" && "$(cat "$STATE_FILE")" == "$TODAY" ]]; then - echo "[BACKUPBOT_INFO] Backup already completed today ($TODAY). Skipping." + echo "[INFO] Backup already done for $TODAY" else - echo "[BACKUPBOT_INFO] Triggering backup.sh at $(date)" - /usr/local/bin/backup.sh "$LOG_FILE" - echo "$TODAY" >"$STATE_FILE" - echo "[BACKUPBOT_INFO] Backup completed and date recorded." + echo "[INFO] Running backup for $TODAY" + if run_backup; then + echo "$TODAY" >"$STATE_FILE" + fi fi - - echo "[BACKUPBOT_INFO] Sleeping for $INTERVAL_HOURS hours..." 
- sleep "${INTERVAL_HOURS}h" + SECONDS_TO_WAIT=$(seconds_until_next_3am) + sleep "$SECONDS_TO_WAIT" done diff --git a/web/cgi-bin/backupbot.cgi b/web/cgi-bin/backupbot.cgi new file mode 100644 index 0000000..699eb76 --- /dev/null +++ b/web/cgi-bin/backupbot.cgi @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +import cgi +import cgitb +import os +import json +import sys +import traceback +import tempfile + +cgitb.enable() +print("Content-Type: application/json\n") + +ENV_FILE = "/config/backupbot.conf" +ZONEINFO_DIR = "/usr/share/zoneinfo" + +# Logging level from environment +LOG_LEVEL = os.environ.get("BACKUPBOT_WEB_LOGGING", "info").lower() +LOG_LEVELS = {"debug": 3, "info": 2, "warn": 1} + + +def log(level, message, exc=None): + """ + Docker-friendly logging. + level: "debug", "info", "warn" + exc: exception object (only used in debug) + """ + if LOG_LEVELS.get(level, 0) <= LOG_LEVELS.get(LOG_LEVEL, 0): + timestamp = ( + __import__("datetime") + .datetime.now() + .strftime( + "%Y-%m-%d %H:%M:%S" + ) + ) + msg = f"[{timestamp}] [{level.upper()}] {message}" + print(msg, file=sys.stderr) + if exc and LOG_LEVEL == "debug": + traceback.print_exception( + type(exc), exc, exc.__traceback__, file=sys.stderr + ) + + +def read_env(): + env = {} + if os.path.exists(ENV_FILE): + try: + with open(ENV_FILE) as f: + for line in f: + line = line.strip() + if not line or "=" not in line: + continue + key, val = line.split("=", 1) + env[key.strip()] = val.strip() + except Exception as e: + log("warn", f"Failed to read config: {e}", e) + return env + + +def write_env(env): + try: + dir_name = os.path.dirname(ENV_FILE) + os.makedirs(dir_name, exist_ok=True) + # Write atomically to temp file + with tempfile.NamedTemporaryFile("w", dir=dir_name, delete=False) as tmp: + for key, val in env.items(): + tmp.write(f"{key}={val}\n") + temp_name = tmp.name + os.replace(temp_name, ENV_FILE) + log("info", f"Configuration saved to {ENV_FILE}") + except Exception as e: + log("warn", f"Failed to 
write config: {e}", e) + raise + + +def list_timezones(): + zones = [] + for root, _, files in os.walk(ZONEINFO_DIR): + rel_root = os.path.relpath(root, ZONEINFO_DIR) + if rel_root.startswith(("posix", "right")): + continue + for file in files: + if file.startswith(".") or file.endswith((".tab", ".zi")): + continue + zones.append(os.path.join(rel_root, file) if rel_root != "." else file) + return sorted(zones) + + +form = cgi.FieldStorage() +action = form.getvalue("action") + +try: + if action == "get": + env = read_env() + log("debug", f"Returning configuration: {env}") + print(json.dumps(env)) + elif action == "set": + raw_len = os.environ.get("CONTENT_LENGTH") + length = int(raw_len) if raw_len else 0 + data = json.loads(os.read(0, length)) + log("debug", f"Received new configuration: {data}") + env = read_env() + env.update(data) # update existing keys, add new keys + write_env(env) + print(json.dumps({"status": "ok", "message": "Configuration saved."})) + elif action == "get_timezones": + zones = list_timezones() + log("debug", f"Returning {len(zones)} timezones") + print(json.dumps({"timezones": zones})) + else: + log("warn", f"Invalid action requested: {action}") + print(json.dumps({"status": "error", "message": "Invalid action"})) +except Exception as e: + log("warn", f"Unhandled exception: {e}", e) + print(json.dumps({"status": "error", "message": str(e)})) diff --git a/web/index.html b/web/index.html new file mode 100644 index 0000000..dd8591f --- /dev/null +++ b/web/index.html @@ -0,0 +1,118 @@ + + + + + + BackupBot Configuration + + + + +

BackupBot Configuration

+ +
+ + + + + + + + + +
+ +

+ + + + +