Compare commits
5 Commits
ba63df67c3
...
refactor-t
| Author | SHA1 | Date | |
|---|---|---|---|
| e5cd26ef16 | |||
| 4cfb634397 | |||
| cb466747cd | |||
| 3f544409f1 | |||
| 6ff6e8759e |
28
.drone.yaml
28
.drone.yaml
@@ -1,28 +0,0 @@
|
||||
kind: pipeline
|
||||
type: docker
|
||||
|
||||
name: build-and-push-container
|
||||
|
||||
triggers:
|
||||
branch:
|
||||
- main
|
||||
event:
|
||||
- push
|
||||
|
||||
steps:
|
||||
- name: build
|
||||
image: docker:20
|
||||
commands:
|
||||
- docker build -t gitea.t000-n.de/t.behrendt/backupsidecar:${DRONE_COMMIT:0:8} .
|
||||
|
||||
- name: push
|
||||
image: docker:20
|
||||
settings:
|
||||
username:
|
||||
from_secret: REGISTRY_USERNAME
|
||||
password:
|
||||
from_secret: REGISTRY_PASSWORD
|
||||
repo: gitea.t000-n.de/t.behrendt/backupsidecar
|
||||
tags:
|
||||
- latest
|
||||
- ${DRONE_COMMIT:0:8}
|
||||
111
.gitea/workflows/cd.yaml
Normal file
111
.gitea/workflows/cd.yaml
Normal file
@@ -0,0 +1,111 @@
|
||||
name: CD
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
DOCKER_REGISTRY: gitea.t000-n.de
|
||||
|
||||
jobs:
|
||||
check-changes:
|
||||
name: Check changes
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
changes: ${{ steps.filter.outputs.code }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Get changed files
|
||||
id: filter
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
code:
|
||||
- 'src/**'
|
||||
- 'Dockerfile'
|
||||
- 'gitea/workflows/**'
|
||||
|
||||
build_and_push:
|
||||
name: Build and push
|
||||
needs:
|
||||
- check-changes
|
||||
if: ${{ needs.check-changes.outputs.changes != '0' }}
|
||||
strategy:
|
||||
matrix:
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
- linux_${{ matrix.arch }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.DOCKER_REGISTRY }}
|
||||
username: ${{ secrets.REGISTRY_USER }}
|
||||
password: ${{ secrets.REGISTRY_PASSWORD }}
|
||||
- id: meta
|
||||
run: |
|
||||
echo REPO_NAME=$(echo ${GITHUB_REPOSITORY} | awk -F"/" '{print $2}' | tr '[:upper:]' '[:lower:]') >> $GITHUB_OUTPUT
|
||||
echo REPO_VERSION=$(git describe --tags --always | sed 's/^v//') >> $GITHUB_OUTPUT
|
||||
- uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
platforms: linux/${{ matrix.arch }}
|
||||
push: true
|
||||
provenance: false
|
||||
tags: |
|
||||
${{ env.DOCKER_REGISTRY }}/t.behrendt/${{ steps.meta.outputs.REPO_NAME }}:${{ steps.meta.outputs.REPO_VERSION }}-${{ matrix.arch }}
|
||||
|
||||
create_tag:
|
||||
name: Create tag
|
||||
needs:
|
||||
- check-changes
|
||||
if: ${{ needs.check-changes.outputs.changes != '0' }}
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
tag: ${{ steps.tag.outputs.new-tag }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: https://gitea.t000-n.de/t.behrendt/conventional-semantic-git-tag-increment@0.0.2
|
||||
id: tag
|
||||
with:
|
||||
token: ${{ secrets.GITEA_TOKEN }}
|
||||
- run: |
|
||||
git tag ${{ steps.tag.outputs.new-tag }}
|
||||
git push origin ${{ steps.tag.outputs.new-tag }}
|
||||
- name: Set output
|
||||
run: |
|
||||
echo "tag=${{ steps.tag.outputs.new-tag }}" >> $GITHUB_OUTPUT
|
||||
|
||||
create_manifest:
|
||||
name: Create manifest
|
||||
needs:
|
||||
- build_and_push
|
||||
- create_tag
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- id: meta
|
||||
run: |
|
||||
echo REPO_NAME=$(echo ${GITHUB_REPOSITORY} | awk -F"/" '{print $2}' | tr '[:upper:]' '[:lower:]') >> $GITHUB_OUTPUT
|
||||
echo REPO_VERSION=$(git describe --tags --always | sed 's/^v//') >> $GITHUB_OUTPUT
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.DOCKER_REGISTRY }}
|
||||
username: ${{ secrets.REGISTRY_USER }}
|
||||
password: ${{ secrets.REGISTRY_PASSWORD }}
|
||||
- run: |
|
||||
docker manifest create ${{ env.DOCKER_REGISTRY }}/t.behrendt/${{ steps.meta.outputs.REPO_NAME }}:${{ needs.create_tag.outputs.tag }} \
|
||||
${{ env.DOCKER_REGISTRY }}/t.behrendt/${{ steps.meta.outputs.REPO_NAME }}:${{ steps.meta.outputs.REPO_VERSION }}-amd64 \
|
||||
${{ env.DOCKER_REGISTRY }}/t.behrendt/${{ steps.meta.outputs.REPO_NAME }}:${{ steps.meta.outputs.REPO_VERSION }}-arm64
|
||||
|
||||
docker manifest push ${{ env.DOCKER_REGISTRY }}/t.behrendt/${{ steps.meta.outputs.REPO_NAME }}:${{ needs.create_tag.outputs.tag }}
|
||||
24
.gitea/workflows/ci.yaml
Normal file
24
.gitea/workflows/ci.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Docker image
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
- linux_amd64
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- name: Build image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
platforms: linux/amd64
|
||||
push: false
|
||||
provenance: false
|
||||
tags: |
|
||||
backupsidecar:ci-test
|
||||
35
.gitignore
vendored
35
.gitignore
vendored
@@ -1 +1,34 @@
|
||||
dockerBuildAndPush.sh
|
||||
# dependencies (bun install)
|
||||
node_modules
|
||||
|
||||
# output
|
||||
out
|
||||
dist
|
||||
*.tgz
|
||||
|
||||
# code coverage
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# logs
|
||||
logs
|
||||
_.log
|
||||
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# caches
|
||||
.eslintcache
|
||||
.cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# IntelliJ based IDEs
|
||||
.idea
|
||||
|
||||
# Finder (MacOS) folder config
|
||||
.DS_Store
|
||||
|
||||
20
Dockerfile
20
Dockerfile
@@ -1,14 +1,16 @@
|
||||
FROM alpine:3.17
|
||||
FROM alpine:3.22
|
||||
|
||||
# Setup correct TZ
|
||||
RUN apk add alpine-conf
|
||||
RUN /sbin/setup-timezone -z UTC
|
||||
|
||||
RUN apk add restic curl
|
||||
RUN apk update && apk add --no-cache \
|
||||
bash \
|
||||
curl \
|
||||
restic \
|
||||
postgresql-client \
|
||||
jq
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./src/entry.sh /app/
|
||||
COPY ./src/backup.sh /app/
|
||||
COPY src/backup.sh /app/backup.sh
|
||||
|
||||
CMD [ "/bin/sh", "entry.sh" ]
|
||||
RUN chmod +x /app/backup.sh
|
||||
|
||||
ENTRYPOINT ["/app/backup.sh"]
|
||||
|
||||
127
README.md
127
README.md
@@ -1,20 +1,113 @@
|
||||
# backupsidecar
|
||||
# BackupSidecar
|
||||
|
||||
Backup sidecar that automatically creates backups of one PVC and saves it to another PVC via restic
|
||||
BackupSidecar is a lightweight backup solution designed to run as a cron job in Kubernetes. It automates backups using Restic and supports both directory and PostgreSQL database backups. Notifications are sent via Gotify to keep you informed of backup results.
|
||||
|
||||
## Function
|
||||
A cronjob inside the container runs in the configured interval creating the backup and purging old backups.
|
||||
A notification is sent to gotify on completion of the backup or on error of either the backup or purge.
|
||||
## Configuration
|
||||
|
||||
## Environment Variables
|
||||
| ENV Variable | Required |Description| Example Value
|
||||
|--------------|----------|--------------|-
|
||||
|INTERVAL|yes|cronjob interval string|15 14 * * *|
|
||||
|RESTIC_REPOSITORY|yes|path of the restic repository|/mnt/backups/gitea|
|
||||
|SOURCEDIR |yes|path of the path to backup|/mnt/toBackup/|
|
||||
|KEEPLAST|yes|number of increments to keep (keep in mind that the number of backups to keep correlates with the interval in which they are created) |10|
|
||||
|RESTIC_PASSWORD|yes|password for the restic repository|******|
|
||||
|RUNONSTART|no|set to true to force a backup at the start of the container|true|
|
||||
|GOTIFYHOST|yes|URL of the gotify server (without trailing slash)|https://gotify.example.com|
|
||||
|GOTIFYTOKEN|yes|gotify app token|******|
|
||||
|GOTIFYTOPIC|yes|gotify topic to include in the notification|gotify|
|
||||
BackupSidecar is configured through environment variables. Below is a breakdown of the available settings.
|
||||
|
||||
### General Settings
|
||||
|
||||
These variables apply to both directory and PostgreSQL backups.
|
||||
|
||||
- **`BACKUP_MODE`** _(optional)_ - Defines the backup type (`directory` or `postgres`). Defaults to `directory`.
|
||||
- **`RESTIC_PASSWORD`** _(required)_ - The encryption password for Restic.
|
||||
- **`RESTIC_REPOSITORY`** _(required)_ - The URI of the Restic repository (e.g., `rest:http://your-rest-server:8000/backup`).
|
||||
- **`RESTIC_REST_USERNAME`** _(optional)_ - The username for REST server authentication.
|
||||
- **`RESTIC_REST_PASSWORD`** _(optional)_ - The password for REST server authentication.
|
||||
- **`GOTIFYHOST`** _(required)_ - The Gotify server URL.
|
||||
- **`GOTIFYTOKEN`** _(required)_ - The API token for Gotify.
|
||||
- **`GOTIFYTOPIC`** _(required)_ - The topic under which backup notifications will be sent.
|
||||
|
||||
### Directory Backup
|
||||
|
||||
When running in `directory` mode, the following variable must be set:
|
||||
|
||||
- **`SOURCEDIR`** _(required)_ - The path of the directory to be backed up.
|
||||
|
||||
### PostgreSQL Backup
|
||||
|
||||
For `postgres` mode, the following database-related variables are required:
|
||||
|
||||
- **`PGHOST`** _(required)_ - The hostname of the PostgreSQL server.
|
||||
- **`PGDATABASE`** _(required)_ - The name of the database to back up.
|
||||
- **`PGUSER`** _(required)_ - The PostgreSQL username.
|
||||
- **`PGPORT`** _(optional)_ - The port for PostgreSQL (defaults to `5432`).
|
||||
- **`PGPASSWORD`** _(optional)_ - The password for authentication. Setting this prevents interactive prompts.
|
||||
- **`PG_DUMP_ARGS`** _(optional)_ - Additional flags for `pg_dump`.
|
||||
|
||||
## Dependencies
|
||||
|
||||
Ensure the following commands are available in the container:
|
||||
|
||||
- `restic`
|
||||
- `curl`
|
||||
- `jq`
|
||||
- `pg_dump` _(only required for `postgres` mode)_
|
||||
|
||||
## Usage
|
||||
|
||||
Example Kubernetes CronJob manifest for running BackupSidecar as a cron job for directory backups in minimal configuration:
|
||||
|
||||
```yaml
|
||||
apiVersion: batch/v1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: backupsidecar-cron
|
||||
namespace: authentik
|
||||
spec:
|
||||
schedule: "0 7 * * *"
|
||||
concurrencyPolicy: Forbid
|
||||
successfulJobsHistoryLimit: 5
|
||||
failedJobsHistoryLimit: 3
|
||||
jobTemplate:
|
||||
spec:
|
||||
backoffLimit: 3
|
||||
activeDeadlineSeconds: 300
|
||||
template:
|
||||
spec:
|
||||
restartPolicy: OnFailure
|
||||
containers:
|
||||
- name: backupsidecar
|
||||
image: backupsidecar:latest
|
||||
env:
|
||||
- name: RESTIC_REPOSITORY
|
||||
value: "rest:http://rest-server:8000/backup"
|
||||
- name: RESTIC_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: backupsidecar-secret
|
||||
key: restic_password
|
||||
- name: BACKUP_MODE
|
||||
value: "directory" # or "postgres"
|
||||
- name: SOURCEDIR
|
||||
value: "/data/source"
|
||||
- name: GOTIFYHOST
|
||||
value: "http://gotify.example.com"
|
||||
- name: GOTIFYTOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: backupsidecar-secret
|
||||
key: gotify_token
|
||||
- name: GOTIFYTOPIC
|
||||
value: "Backup Notification"
|
||||
# (For PostgreSQL mode, add PGHOST, PGDATABASE, PGUSER, PGPORT, PGPASSWORD)
|
||||
volumeMounts:
|
||||
- name: source-data
|
||||
mountPath: /data/source
|
||||
restartPolicy: OnFailure
|
||||
volumes:
|
||||
- name: source-data
|
||||
persistentVolumeClaim:
|
||||
claimName: source-data-pvc
|
||||
```
|
||||
|
||||
## Notifications
|
||||
|
||||
The script sends success or failure notifications via Gotify.
|
||||
|
||||
Example success notification:
|
||||
|
||||
```
|
||||
Backup successful. Snapshot 56ff6a909a44e01f67d2d88f9a76aa713d437809d7ed14a2361e28893f38befb: files new: 1, files changed: 0, data added: 1019 bytes in 0.277535184 sec
|
||||
```
|
||||
|
||||
90
bun.lock
Normal file
90
bun.lock
Normal file
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "backupsidecar",
|
||||
"dependencies": {
|
||||
"env-var": "^7.5.0",
|
||||
"pino": "^9.9.0",
|
||||
"pino-pretty": "^13.1.1",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/bun": ["@types/bun@1.2.21", "", { "dependencies": { "bun-types": "1.2.21" } }, "sha512-NiDnvEqmbfQ6dmZ3EeUO577s4P5bf4HCTXtI6trMc6f6RzirY5IrF3aIookuSpyslFzrnvv2lmEWv5HyC1X79A=="],
|
||||
|
||||
"@types/node": ["@types/node@24.3.0", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.12", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-cMoR+FoAf/Jyq6+Df2/Z41jISvGZZ2eTlnsaJRptmZ76Caldwy1odD4xTr/gNV9VLj0AWgg/nmkevIyUfIIq5w=="],
|
||||
|
||||
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.21", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-sa2Tj77Ijc/NTLS0/Odjq/qngmEPZfbfnOERi0KRUYhT9R8M4VBioWVmMWE5GrYbKMc+5lVybXygLdibHaqVqw=="],
|
||||
|
||||
"colorette": ["colorette@2.0.20", "", {}, "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"dateformat": ["dateformat@4.6.3", "", {}, "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA=="],
|
||||
|
||||
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
|
||||
|
||||
"env-var": ["env-var@7.5.0", "", {}, "sha512-mKZOzLRN0ETzau2W2QXefbFjo5EF4yWq28OyKb9ICdeNhHJlOE/pHHnz4hdYJ9cNZXcJHo5xN4OT4pzuSHSNvA=="],
|
||||
|
||||
"fast-copy": ["fast-copy@3.0.2", "", {}, "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ=="],
|
||||
|
||||
"fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="],
|
||||
|
||||
"fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="],
|
||||
|
||||
"help-me": ["help-me@5.0.0", "", {}, "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg=="],
|
||||
|
||||
"joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="],
|
||||
|
||||
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
|
||||
|
||||
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
|
||||
|
||||
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||
|
||||
"pino": ["pino@9.9.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-zxsRIQG9HzG+jEljmvmZupOMDUQ0Jpj0yAgE28jQvvrdYTlEaiGwelJpdndMl/MBuRr70heIj83QyqJUWaU8mQ=="],
|
||||
|
||||
"pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="],
|
||||
|
||||
"pino-pretty": ["pino-pretty@13.1.1", "", { "dependencies": { "colorette": "^2.0.7", "dateformat": "^4.6.3", "fast-copy": "^3.0.2", "fast-safe-stringify": "^2.1.1", "help-me": "^5.0.0", "joycon": "^3.1.1", "minimist": "^1.2.6", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pump": "^3.0.0", "secure-json-parse": "^4.0.0", "sonic-boom": "^4.0.1", "strip-json-comments": "^5.0.2" }, "bin": { "pino-pretty": "bin.js" } }, "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA=="],
|
||||
|
||||
"pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="],
|
||||
|
||||
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
|
||||
|
||||
"pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="],
|
||||
|
||||
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
|
||||
|
||||
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
|
||||
|
||||
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||
|
||||
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
|
||||
|
||||
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
|
||||
|
||||
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
|
||||
|
||||
"strip-json-comments": ["strip-json-comments@5.0.3", "", {}, "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw=="],
|
||||
|
||||
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
|
||||
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
|
||||
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
}
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: backup-sidecar
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: backup-sidecar
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: backup-sidecar
|
||||
spec:
|
||||
containers:
|
||||
- name: backup-sidecar
|
||||
image: gitea.t000-n.de/t.behrendt/backupsidecar:latest
|
||||
resources:
|
||||
requests:
|
||||
cpu: 128m
|
||||
memory: 128Mi
|
||||
limits:
|
||||
cpu: 256m
|
||||
memory: 1024Mi
|
||||
volumeMounts:
|
||||
- mountPath: /mnt/toBackup
|
||||
name: data
|
||||
readOnly: true
|
||||
- mountPath: /mnt/backups/gitea
|
||||
name: nfs-backup
|
||||
subPath: gitea
|
||||
env:
|
||||
- name: INTERVAL
|
||||
value: "15 14 * * *"
|
||||
- name: RESTIC_REPOSITORY
|
||||
value: /mnt/backups/gitea
|
||||
- name: SOURCEDIR
|
||||
value: "/mnt/toBackup/"
|
||||
- name: KEEPLAST
|
||||
value: "100"
|
||||
- name: RESTIC_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: restic-credentials
|
||||
key: password
|
||||
- name: RUNONSTART
|
||||
value: "false"
|
||||
- name: GOTIFYHOST
|
||||
value: "https://<gotify-URL>"
|
||||
- name: GOTIFYTOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: gotify-credentials
|
||||
key: token
|
||||
- name: GOTIFYTOPIC
|
||||
value: "gitea"
|
||||
volumes:
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: data
|
||||
- name: nfs-backup
|
||||
persistentVolumeClaim:
|
||||
claimName: backup-nfs
|
||||
17
package.json
Normal file
17
package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "backupsidecar",
|
||||
"module": "src/main.ts",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5"
|
||||
},
|
||||
"dependencies": {
|
||||
"env-var": "^7.5.0",
|
||||
"pino": "^9.9.0",
|
||||
"pino-pretty": "^13.1.1"
|
||||
}
|
||||
}
|
||||
3
renovate.json
Normal file
3
renovate.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json"
|
||||
}
|
||||
212
src/backup.sh
212
src/backup.sh
@@ -1,40 +1,188 @@
|
||||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
GOTIFYURL="$GOTIFYHOST/message?token=$GOTIFYTOKEN"
|
||||
#######################################
|
||||
# Determine backup mode from the environment only.
|
||||
# Valid values: "directory" or "postgres".
|
||||
# Default to "directory" if not provided.
|
||||
#######################################
|
||||
BACKUP_MODE="${BACKUP_MODE:-directory}"
|
||||
|
||||
echo "$(date +"%Y-%m-%dT%T") - Starting backup"
|
||||
|
||||
restic backup \
|
||||
--verbose \
|
||||
$SOURCEDIR
|
||||
|
||||
RESTIC_BACKUP_RETURN=$?
|
||||
|
||||
if [ $RESTIC_BACKUP_RETURN -eq 0 ]; then
|
||||
MSG_BACKUP_SUCCESS="Backup successful"
|
||||
echo "$(date +"%Y-%m-%dT%T") - $MSG_BACKUP_SUCCESS"
|
||||
curl -s -X POST "$GOTIFYURL" -F "title=$GOTIFYTOPIC" -F "message=$MSG_BACKUP_SUCCESS"
|
||||
else
|
||||
MSG_BACKUP_ERR="Backup failed with error code $RESTIC_BACKUP_RETURN"
|
||||
echo "$(date +"%Y-%m-%dT%T") - MSG_BACKUP_ERR"
|
||||
curl -s -X POST "$GOTIFYURL" -F "title=$GOTIFYTOPIC" -F "message=$MSG_BACKUP_ERR"
|
||||
exit $RESTIC_BACKUP_RETURN
|
||||
#######################################
|
||||
# Check for required external commands.
|
||||
#######################################
|
||||
REQUIRED_CMDS=(restic curl jq)
|
||||
if [ "$BACKUP_MODE" = "postgres" ]; then
|
||||
REQUIRED_CMDS+=(pg_dump)
|
||||
fi
|
||||
|
||||
for cmd in "${REQUIRED_CMDS[@]}"; do
|
||||
if ! command -v "$cmd" &>/dev/null; then
|
||||
echo "Error: Required command '$cmd' is not installed." >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
MSG_PURGE_START="$(date +"%Y-%m-%dT%T") - Removing old backups"
|
||||
echo $MSG_PURGE_START
|
||||
#######################################
|
||||
# Validate common required environment variables.
|
||||
#######################################
|
||||
# Gotify notification settings.
|
||||
: "${GOTIFYHOST:?Environment variable GOTIFYHOST is not set}"
|
||||
: "${GOTIFYTOKEN:?Environment variable GOTIFYTOKEN is not set}"
|
||||
: "${GOTIFYTOPIC:?Environment variable GOTIFYTOPIC is not set}"
|
||||
|
||||
restic forget --keep-last $KEEPLAST --prune
|
||||
RESTIC_PURGE_RETURN=$?
|
||||
# Restic encryption password.
|
||||
: "${RESTIC_PASSWORD:?Environment variable RESTIC_PASSWORD is not set}"
|
||||
|
||||
if [ $RESTIC_PURGE_RETURN -eq 0 ]; then
|
||||
echo "$(date +"%Y-%m-%dT%T") - Purge successful"
|
||||
else
|
||||
MSG_PURGE_ERR="Purge failed with error code $MSG_PURGE_ERR"
|
||||
echo "$(date +"%Y-%m-%dT%T") - $MSG_PURGE_ERR"
|
||||
curl -s -X POST "$GOTIFYURL" -F "title=$GOTIFYTOPIC" -F "message=$MSG_PURGE_ERR"
|
||||
exit $RESTIC_PURGE_RETURN
|
||||
fi
|
||||
# Use the repository URI directly from the environment.
|
||||
# Example: export RESTIC_REPOSITORY="rest:http://your-rest-server:8000/backup"
|
||||
: "${RESTIC_REPOSITORY:?Environment variable RESTIC_REPOSITORY is not set}"
|
||||
|
||||
echo "$(date +"%Y-%m-%dT%T") - Going back to sleep..."
|
||||
#######################################
|
||||
# Validate mode-specific environment variables.
|
||||
#######################################
|
||||
case "$BACKUP_MODE" in
|
||||
directory)
|
||||
: "${SOURCEDIR:?Environment variable SOURCEDIR is not set (required for directory backup mode)}"
|
||||
;;
|
||||
postgres)
|
||||
: "${PGHOST:?Environment variable PGHOST is not set (required for PostgreSQL backup mode)}"
|
||||
: "${PGDATABASE:?Environment variable PGDATABASE is not set (required for PostgreSQL backup mode)}"
|
||||
: "${PGUSER:?Environment variable PGUSER is not set (required for PostgreSQL backup mode)}"
|
||||
# Optional: default PGPORT to 5432.
|
||||
: "${PGPORT:=5432}"
|
||||
if [ -z "${PGPASSWORD:-}" ]; then
|
||||
echo "Warning: Environment variable PGPASSWORD is not set. pg_dump may fail if authentication is required."
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown backup mode '$BACKUP_MODE'. Valid modes are 'directory' and 'postgres'." >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
#######################################
|
||||
# Build the Gotify URL.
|
||||
#######################################
|
||||
GOTIFYURL="${GOTIFYHOST}/message?token=${GOTIFYTOKEN}"
|
||||
|
||||
#######################################
|
||||
# Date format for logging.
|
||||
#######################################
|
||||
LOG_DATE_FORMAT="%Y-%m-%dT%T"
|
||||
|
||||
#######################################
|
||||
# Log a message with a timestamp.
|
||||
# Arguments:
|
||||
# Message to log.
|
||||
#######################################
|
||||
log() {
|
||||
echo "$(date +"$LOG_DATE_FORMAT") - $*"
|
||||
}
|
||||
|
||||
#######################################
|
||||
# Send a notification via Gotify.
|
||||
# Arguments:
|
||||
# message: The message to send.
|
||||
#######################################
|
||||
send_notification() {
|
||||
local message="$1"
|
||||
if ! curl -s -X POST "$GOTIFYURL" -F "title=${GOTIFYTOPIC}" -F "message=${message}" >/dev/null; then
|
||||
log "Warning: Failed to send notification with message: ${message}"
|
||||
fi
|
||||
}
|
||||
|
||||
#######################################
|
||||
# Run the backup using restic.
|
||||
# The --no-cache flag disables local caching.
|
||||
# Arguments:
|
||||
# $1 - The source directory to back up.
|
||||
#######################################
|
||||
run_restic_backup() {
|
||||
local source_dir="$1"
|
||||
cd "${source_dir}"
|
||||
log "Starting backup of '${source_dir}' to repository ${RESTIC_REPOSITORY}"
|
||||
# Capture both stdout and stderr in a variable
|
||||
backup_output=$(restic -r "${RESTIC_REPOSITORY}" backup --no-cache --json --verbose . 2>&1)
|
||||
# Optionally, also print the output to the console:
|
||||
echo "$backup_output"
|
||||
# Parse the JSON lines output for the summary message
|
||||
summary=$(echo "$backup_output" | jq -r 'select(.message_type=="summary") | "Snapshot " + (.snapshot_id // "none") + ": " + "files new: " + (.files_new|tostring) + ", files changed: " + (.files_changed|tostring) + ", data added: " + (.data_added|tostring) + " bytes in " + (.total_duration|tostring) + " sec"')
|
||||
# Check exit code of restic backup (assuming restic exits non-zero on error)
|
||||
if [ $? -eq 0 ]; then
|
||||
msg="Backup successful. $summary"
|
||||
log "$msg"
|
||||
send_notification "$msg"
|
||||
else
|
||||
exit_code=$?
|
||||
msg="Backup failed with error code ${exit_code}. $backup_output"
|
||||
log "$msg"
|
||||
send_notification "$msg"
|
||||
exit "$exit_code"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
#######################################
|
||||
# Backup a directory (regular mode).
|
||||
#######################################
|
||||
backup_directory() {
|
||||
run_restic_backup "${SOURCEDIR}"
|
||||
}
|
||||
|
||||
#######################################
|
||||
# Backup a PostgreSQL database.
|
||||
# Dumps the database to a temporary directory and then backs it up.
|
||||
#######################################
|
||||
backup_postgres() {
|
||||
log "Starting PostgreSQL backup for database '${PGDATABASE}' on host '${PGHOST}'"
|
||||
|
||||
# Create a temporary directory for the database dump.
|
||||
TEMP_BACKUP_DIR=$(mktemp -d)
|
||||
log "Created temporary directory: ${TEMP_BACKUP_DIR}"
|
||||
|
||||
local dump_file="${TEMP_BACKUP_DIR}/dump.sql"
|
||||
log "Dumping PostgreSQL database to ${dump_file}..."
|
||||
if pg_dump -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ${PG_DUMP_ARGS:-} "${PGDATABASE}" > "${dump_file}"; then
|
||||
log "Database dump created successfully."
|
||||
else
|
||||
local exit_code=$?
|
||||
local msg="PostgreSQL dump failed with error code ${exit_code}"
|
||||
log "$msg"
|
||||
send_notification "$msg"
|
||||
exit "$exit_code"
|
||||
fi
|
||||
|
||||
# Back up the directory containing the dump.
|
||||
run_restic_backup "${TEMP_BACKUP_DIR}"
|
||||
}
|
||||
|
||||
#######################################
|
||||
# Cleanup temporary resources.
|
||||
#######################################
|
||||
cleanup() {
|
||||
if [ -n "${TEMP_BACKUP_DIR:-}" ] && [ -d "${TEMP_BACKUP_DIR}" ]; then
|
||||
rm -rf "${TEMP_BACKUP_DIR}"
|
||||
log "Removed temporary directory ${TEMP_BACKUP_DIR}"
|
||||
fi
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
#######################################
|
||||
# Main routine.
|
||||
#######################################
|
||||
main() {
|
||||
case "$BACKUP_MODE" in
|
||||
directory)
|
||||
backup_directory
|
||||
;;
|
||||
postgres)
|
||||
backup_postgres
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Trap termination signals to log and exit cleanly.
|
||||
trap 'log "Script interrupted. Exiting."; exit 1' SIGINT SIGTERM
|
||||
|
||||
main
|
||||
|
||||
33
src/backup.ts
Normal file
33
src/backup.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import {
|
||||
BACKUP_MODE,
|
||||
globalLogger,
|
||||
GOTIFY_HOST,
|
||||
GOTIFY_TOKEN,
|
||||
GOTIFY_TOPIC,
|
||||
} from "./env";
|
||||
import { directoryBackup } from "./directoryBackup";
|
||||
import { postgresBackup } from "./postgresBackup";
|
||||
import { gotifyClientFactory } from "./gotify";
|
||||
import { createBackupContext, reHomeContext } from "./backupContext";
|
||||
|
||||
export default async function backup() {
|
||||
const context = createBackupContext(
|
||||
"backup",
|
||||
BACKUP_MODE,
|
||||
globalLogger,
|
||||
gotifyClientFactory(GOTIFY_HOST, GOTIFY_TOKEN, GOTIFY_TOPIC)
|
||||
);
|
||||
context.logger.debug("Starting backup");
|
||||
|
||||
switch (BACKUP_MODE) {
|
||||
case "directory":
|
||||
context.logger.debug("Starting directory backup");
|
||||
return await directoryBackup(reHomeContext(context, "directoryBackup"));
|
||||
case "postgres":
|
||||
context.logger.debug("Starting postgres backup");
|
||||
return await postgresBackup(reHomeContext(context, "postgresBackup"));
|
||||
default:
|
||||
context.logger.error("Invalid backup mode");
|
||||
throw new Error("Invalid backup mode");
|
||||
}
|
||||
}
|
||||
33
src/backupContext.ts
Normal file
33
src/backupContext.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import type { Logger } from "pino";
|
||||
import { type NotificationClient } from "./gotify";
|
||||
|
||||
/**
 * Shared per-run state threaded through every backup routine: a
 * module-scoped logger, a client for pushing user-facing notifications,
 * and the restic repository the backup writes to.
 */
export interface BackupContext {
  logger: Logger;
  notificationClient: NotificationClient;
  resticRepository: string;
}
|
||||
|
||||
export function createBackupContext(
|
||||
module: string,
|
||||
resticRepository: string,
|
||||
globalLogger: Logger,
|
||||
notificationClient: NotificationClient
|
||||
): BackupContext {
|
||||
const logger = globalLogger.child({ module });
|
||||
|
||||
return {
|
||||
logger,
|
||||
notificationClient,
|
||||
resticRepository,
|
||||
};
|
||||
}
|
||||
|
||||
export function reHomeContext(
|
||||
context: BackupContext,
|
||||
module: string
|
||||
): BackupContext {
|
||||
return {
|
||||
...context,
|
||||
logger: context.logger.child({ module }),
|
||||
};
|
||||
}
|
||||
100
src/backupUtils.ts
Normal file
100
src/backupUtils.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import type { BackupContext } from "./backupContext";
|
||||
|
||||
export function parseResticSummary(output: string): string | null {
|
||||
try {
|
||||
const lines = output.split("\n").filter((line) => line.trim());
|
||||
for (const line of lines) {
|
||||
try {
|
||||
const parsed = JSON.parse(line);
|
||||
if (parsed.message_type === "summary") {
|
||||
return `Snapshot ${parsed.snapshot_id || "none"}: files new: ${
|
||||
parsed.files_new || 0
|
||||
}, files changed: ${parsed.files_changed || 0}, data added: ${
|
||||
parsed.data_added || 0
|
||||
} bytes in ${parsed.total_duration || 0} sec`;
|
||||
}
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Failed to parse restic output: ${error}`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export function runResticBackup(
|
||||
sourceDir: string,
|
||||
context: BackupContext
|
||||
): { success: boolean; output: string; summary: string | null } {
|
||||
const { logger, resticRepository } = context;
|
||||
|
||||
logger.info(
|
||||
`Starting backup of '${sourceDir}' to repository ${resticRepository}`
|
||||
);
|
||||
|
||||
const result = Bun.spawnSync(
|
||||
[
|
||||
"restic",
|
||||
"-r",
|
||||
resticRepository,
|
||||
"backup",
|
||||
"--no-cache",
|
||||
"--json",
|
||||
"--verbose",
|
||||
".",
|
||||
],
|
||||
{
|
||||
cwd: sourceDir,
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
}
|
||||
);
|
||||
|
||||
const output = result.stdout?.toString() + result.stderr?.toString() || "";
|
||||
const success = result.success;
|
||||
const summary = parseResticSummary(output);
|
||||
|
||||
return { success, output, summary };
|
||||
}
|
||||
|
||||
export async function executeBackup(
|
||||
backupType: string,
|
||||
backupFn: () => Promise<{
|
||||
success: boolean;
|
||||
output: string;
|
||||
summary: string | null;
|
||||
}>,
|
||||
context: BackupContext
|
||||
): Promise<void> {
|
||||
const { logger, notificationClient } = context;
|
||||
|
||||
try {
|
||||
logger.info(`Starting ${backupType} backup process`);
|
||||
|
||||
const { success, output, summary } = await backupFn();
|
||||
|
||||
console.log(output);
|
||||
|
||||
if (success) {
|
||||
const message = `${backupType} backup successful. ${
|
||||
summary || "No summary available"
|
||||
}`;
|
||||
logger.info(message);
|
||||
await notificationClient.sendNotification(message);
|
||||
} else {
|
||||
const message = `${backupType} backup failed: ${
|
||||
summary || "Unknown error"
|
||||
}`;
|
||||
logger.error(message);
|
||||
await notificationClient.sendNotification(message);
|
||||
throw new Error(`${backupType} backup failed: ${message}`);
|
||||
}
|
||||
|
||||
logger.info(`${backupType} backup completed successfully`);
|
||||
} catch (error) {
|
||||
const errorMessage = `${backupType} backup failed: ${error}`;
|
||||
logger.error(errorMessage);
|
||||
await notificationClient.sendNotification(errorMessage);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
13
src/directoryBackup.ts
Normal file
13
src/directoryBackup.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { SOURCEDIR } from "./env";
|
||||
import { executeBackup, runResticBackup } from "./backupUtils";
|
||||
import type { BackupContext } from "./backupContext";
|
||||
|
||||
export async function directoryBackup(context: BackupContext): Promise<void> {
|
||||
await executeBackup(
|
||||
"Directory",
|
||||
async () => {
|
||||
return runResticBackup(SOURCEDIR, context);
|
||||
},
|
||||
context
|
||||
);
|
||||
}
|
||||
18
src/entry.sh
18
src/entry.sh
@@ -1,18 +0,0 @@
|
||||
#!/bin/sh

# Container entrypoint: install the backup cron job, optionally run an
# immediate backup, then keep the container alive with cron in the
# foreground.

# -p so a pre-existing /etc/cron.d does not make mkdir fail.
mkdir -p /etc/cron.d
touch /etc/cron.d/backup
echo "$INTERVAL /bin/sh /app/backup.sh" > /etc/cron.d/backup

# change ownership and make the cron known to crontab
chmod 0644 /etc/cron.d/backup && crontab /etc/cron.d/backup

# BUG FIX: $RUNONSTART was unquoted — with the variable unset the test
# became `[ = true ]`, a syntax error. Quote it and default to empty.
if [ "${RUNONSTART:-}" = 'true' ]; then
    echo "$(date +"%Y-%m-%dT%T") - Running initial backup"
    /bin/sh /app/backup.sh
fi

# Wait until infinity
echo "$(date +"%Y-%m-%dT%T") - Starting cron"
crond -f
|
||||
61
src/env.ts
Normal file
61
src/env.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { from } from "env-var";
import pino from "pino";

// LOG_LEVEL is read with a bootstrap env reader (no logging hook attached)
// because the logger itself depends on it.
const initialEnv = from(process.env, {});
const LOG_LEVEL = initialEnv
  .get("LOG_LEVEL")
  .default("info")
  .asEnum(["fatal", "error", "warn", "info", "debug", "trace"]);

// Application-wide root logger; modules derive child loggers from it.
export const globalLogger = pino({
  level: LOG_LEVEL,
  transport: {
    target: "pino-pretty",
    options: {
      colorize: true,
      translateTime: "SYS:standard",
      ignore: "pid,hostname",
    },
  },
});

// Main env reader: routes env-var's resolution messages through the logger.
const env = from(process.env, {}, (msg: string) => globalLogger.info(msg));

// Selects which backup implementation runs (see backup.ts).
export const BACKUP_MODE = env
  .get("BACKUP_MODE")
  .required()
  .asEnum(["directory", "postgres"]);

// Gotify push-notification settings (always required).
export const GOTIFY_HOST = env.get("GOTIFY_HOST").required().asUrlString();
export const GOTIFY_TOKEN = env.get("GOTIFY_TOKEN").required().asString();
export const GOTIFY_TOPIC = env.get("GOTIFY_TOPIC").required().asString();

// restic settings. The REST credentials are optional — NOTE(review):
// presumably only needed for rest-server repositories; confirm.
export const RESTIC_PASSWORD = env.get("RESTIC_PASSWORD").required().asString();
export const RESTIC_REPOSITORY = env
  .get("RESTIC_REPOSITORY")
  .required()
  .asString();
export const RESTIC_REST_USERNAME = env.get("RESTIC_REST_USERNAME").asString();
export const RESTIC_REST_PASSWORD = env.get("RESTIC_REST_PASSWORD").asString();

// Directory to back up; only required in "directory" mode.
export const SOURCEDIR = env
  .get("SOURCEDIR")
  .required(BACKUP_MODE === "directory")
  .asString();

// PostgreSQL connection settings; only required in "postgres" mode.
export const PGDATABASE = env
  .get("PGDATABASE")
  .required(BACKUP_MODE === "postgres")
  .asString();
export const PGHOST = env
  .get("PGHOST")
  .required(BACKUP_MODE === "postgres")
  .asString();
export const PGUSER = env
  .get("PGUSER")
  .required(BACKUP_MODE === "postgres")
  .asString();
export const PGPORT = env
  .get("PGPORT")
  .required(BACKUP_MODE === "postgres")
  .asString();
|
||||
23
src/gotify.ts
Normal file
23
src/gotify.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
/**
 * Minimal push-notification abstraction; gotifyClientFactory provides the
 * Gotify-backed implementation.
 */
export interface NotificationClient {
  /** Deliver a single message to the configured channel. */
  sendNotification(message: string): Promise<void>;
}
|
||||
|
||||
export function gotifyClientFactory(
|
||||
gotifyHost: string,
|
||||
gotifyToken: string,
|
||||
gotifyTopic: string
|
||||
): NotificationClient {
|
||||
const sendNotification = async (message: string) => {
|
||||
await fetch(`${gotifyHost}/message?token=${gotifyToken}`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
title: gotifyTopic,
|
||||
message: message,
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
sendNotification,
|
||||
};
|
||||
}
|
||||
15
src/main.ts
Normal file
15
src/main.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { globalLogger } from "./env";
|
||||
import backup from "./backup";
|
||||
|
||||
const logger = globalLogger.child({ module: "main" });
|
||||
|
||||
(async () => {
|
||||
try {
|
||||
logger.info("Starting backup application");
|
||||
await backup();
|
||||
logger.info("Backup application completed successfully");
|
||||
} catch (error) {
|
||||
logger.error(`Backup application failed: ${error}`);
|
||||
process.exit(1);
|
||||
}
|
||||
})();
|
||||
78
src/postgresBackup.ts
Normal file
78
src/postgresBackup.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { writeFileSync, mkdtempSync, rmSync } from "fs";
|
||||
import { join } from "path";
|
||||
import { tmpdir } from "os";
|
||||
import { PGHOST, PGDATABASE, PGUSER, PGPORT } from "./env";
|
||||
import { executeBackup, runResticBackup } from "./backupUtils";
|
||||
import type { BackupContext } from "./backupContext";
|
||||
|
||||
function dumpPostgresDatabase(context: BackupContext): {
|
||||
success: boolean;
|
||||
tempDir: string;
|
||||
dumpFile: string;
|
||||
} {
|
||||
const { logger } = context;
|
||||
|
||||
const tempDir = mkdtempSync(join(tmpdir(), "postgres-backup-"));
|
||||
const dumpFile = join(tempDir, "dump.sql");
|
||||
|
||||
logger.info(`Created temporary directory: ${tempDir}`);
|
||||
logger.info(`Dumping PostgreSQL database to ${dumpFile}...`);
|
||||
|
||||
const result = Bun.spawnSync(
|
||||
["pg_dump", "-h", PGHOST, "-p", PGPORT, "-U", PGUSER, PGDATABASE],
|
||||
{
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
}
|
||||
);
|
||||
|
||||
if (result.success) {
|
||||
writeFileSync(dumpFile, result.stdout?.toString() || "");
|
||||
logger.info("Database dump created successfully.");
|
||||
return { success: true, tempDir, dumpFile };
|
||||
} else {
|
||||
logger.error(`PostgreSQL dump failed`);
|
||||
logger.error(`stderr: ${result.stderr?.toString() || ""}`);
|
||||
return { success: false, tempDir, dumpFile };
|
||||
}
|
||||
}
|
||||
|
||||
export async function postgresBackup(context: BackupContext): Promise<void> {
|
||||
let tempDir: string | null = null;
|
||||
|
||||
try {
|
||||
context.logger.info(
|
||||
`Starting PostgreSQL backup for database '${PGDATABASE}' on host '${PGHOST}'`
|
||||
);
|
||||
|
||||
const { success, tempDir: dir, dumpFile } = dumpPostgresDatabase(context);
|
||||
tempDir = dir;
|
||||
|
||||
if (!success) {
|
||||
throw new Error("PostgreSQL dump failed");
|
||||
}
|
||||
|
||||
await executeBackup(
|
||||
"PostgreSQL",
|
||||
async () => {
|
||||
if (!tempDir) {
|
||||
throw new Error("Temporary directory not created");
|
||||
}
|
||||
return runResticBackup(tempDir, context);
|
||||
},
|
||||
context
|
||||
);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
} finally {
|
||||
if (tempDir) {
|
||||
try {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
context.logger.info(`Removed temporary directory ${tempDir}`);
|
||||
} catch (cleanupError) {
|
||||
context.logger.warn(
|
||||
`Failed to cleanup temporary directory ${tempDir}: ${cleanupError}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
29
tsconfig.json
Normal file
29
tsconfig.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
// Environment setup & latest features
|
||||
"lib": ["ESNext"],
|
||||
"target": "ESNext",
|
||||
"module": "Preserve",
|
||||
"moduleDetection": "force",
|
||||
"jsx": "react-jsx",
|
||||
"allowJs": true,
|
||||
|
||||
// Bundler mode
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"noEmit": true,
|
||||
|
||||
// Best practices
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noImplicitOverride": true,
|
||||
|
||||
// Some stricter flags (disabled by default)
|
||||
"noUnusedLocals": false,
|
||||
"noUnusedParameters": false,
|
||||
"noPropertyAccessFromIndexSignature": false
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user