feat: backup

2025-11-23 19:32:22 +08:00
parent c41ef094ea
commit 070fe1b416
6 changed files with 322 additions and 15 deletions

.env.example (new file, 32 lines added)

@@ -0,0 +1,32 @@
# Database Configuration
DB_HOST=db
DB_PORT=3306
DB_USER=nysoure
DB_PASSWORD=your_secure_password_here
DB_NAME=nysoure
MYSQL_ROOT_PASSWORD=your_secure_root_password_here
MYSQL_DATABASE=nysoure
MYSQL_USER=nysoure
MYSQL_PASSWORD=your_secure_password_here
# Redis Configuration
REDIS_HOST=redis
REDIS_PORT=6379
# Application Configuration
BANNED_REDIRECT_DOMAINS=example.com,example.org
ALLOWED_URL_REGEXPS=
# Backup Configuration
# Object storage configuration (S3-compatible)
S3_ENDPOINT=https://s3.amazonaws.com
S3_BUCKET=nysoure-backups
S3_ACCESS_KEY=your_access_key_here
S3_SECRET_KEY=your_secret_key_here
S3_REGION=us-east-1
# Backup schedule (cron format) - default: daily at 2 AM
BACKUP_SCHEDULE=0 2 * * *
# Retention policy (days)
BACKUP_RETENTION_DAYS=30
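
The S3 values above can be sanity-checked against the bucket before the stack is started. A minimal sketch, not part of this commit, assuming rclone is installed on the host and the S3_* variables from .env have been exported into the current shell (the on-the-fly :s3: remote avoids writing a config file):

# Hypothetical pre-flight check; not part of this commit.
rclone lsd ":s3:${S3_BUCKET}" \
  --s3-provider AWS \
  --s3-endpoint "${S3_ENDPOINT}" \
  --s3-region "${S3_REGION:-us-east-1}" \
  --s3-access-key-id "${S3_ACCESS_KEY}" \
  --s3-secret-access-key "${S3_SECRET_KEY}" \
  && echo "S3 credentials OK"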

.gitignore (2 lines added)

@@ -3,3 +3,5 @@
test.db
.idea/
build/
temp/
.env

Dockerfile.backup (new file, 28 lines added)

@@ -0,0 +1,28 @@
FROM alpine:latest
# Install required packages
RUN apk add --no-cache \
        bash \
        mysql-client \
        rclone \
        tzdata \
        supercronic \
    && rm -rf /var/cache/apk/*
# Set timezone (optional, adjust as needed)
ENV TZ=UTC
# Create backup directory
RUN mkdir -p /backup/local
# Copy backup script
COPY backup.sh /usr/local/bin/backup.sh
RUN chmod +x /usr/local/bin/backup.sh
# Copy entrypoint script
COPY backup-entrypoint.sh /usr/local/bin/backup-entrypoint.sh
RUN chmod +x /usr/local/bin/backup-entrypoint.sh
WORKDIR /backup
ENTRYPOINT ["/usr/local/bin/backup-entrypoint.sh"]
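
The image can also be built and exercised on its own while iterating on the backup scripts. A rough sketch, assuming the repository root as build context; the image tag and compose network name are placeholders, not part of this commit:

# Sketch only: "nysoure-backup" and "nysoure_default" are assumed names
docker build -f Dockerfile.backup -t nysoure-backup .
docker run --rm --env-file .env --network nysoure_default nysoure-backup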

backup-entrypoint.sh (new file, 44 lines added)

@@ -0,0 +1,44 @@
#!/bin/bash
set -e
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}
# Default schedule: daily at 2 AM
BACKUP_SCHEDULE="${BACKUP_SCHEDULE:-0 2 * * *}"
log "Backup container starting..."
log "Backup schedule: ${BACKUP_SCHEDULE}"
# Wait for database to be ready
log "Waiting for database to be ready..."
for i in {1..30}; do
    if mysql -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USER}" -p"${DB_PASSWORD}" -e "SELECT 1" > /dev/null 2>&1; then
        log "Database is ready!"
        break
    fi
    if [ "${i}" -eq 30 ]; then
        log "ERROR: Database did not become ready after 30 attempts."
        exit 1
    fi
    log "Waiting for database... (${i}/30)"
    sleep 2
done
# Validate S3 configuration
if [ -z "${S3_BUCKET}" ] || [ -z "${S3_ACCESS_KEY}" ] || [ -z "${S3_SECRET_KEY}" ]; then
    log "ERROR: S3 configuration is incomplete!"
    log "Please set S3_BUCKET, S3_ACCESS_KEY, and S3_SECRET_KEY environment variables."
    exit 1
fi
# Run initial backup
log "Running initial backup..."
/usr/local/bin/backup.sh
# Create crontab
echo "${BACKUP_SCHEDULE} /usr/local/bin/backup.sh >> /var/log/backup.log 2>&1" > /tmp/crontab
log "Starting scheduled backups with supercronic..."
log "Logs will be written to /var/log/backup.log"
# Run supercronic with the crontab
exec supercronic /tmp/crontab
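
Because supercronic only executes the single crontab entry above, a backup can also be triggered by hand inside the running container. A short sketch, assuming the compose service is named backup as in the compose changes further down:

# Assumes the service is called "backup" in the compose file
docker compose exec backup /usr/local/bin/backup.sh
# Scheduled runs log to the file named in the crontab entry above
docker compose exec backup tail -f /var/log/backup.log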

backup.sh (new file, 190 lines added)

@@ -0,0 +1,190 @@
#!/bin/bash
# Fail on errors, including failures inside pipelines (mysqldump | gzip)
set -eo pipefail
# Configuration
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_DIR="/backup/local"
APP_DATA_DIR="/backup/app_data"
BACKUP_DATE=$(date +%Y-%m-%d)
# S3 configuration
S3_ENDPOINT="${S3_ENDPOINT}"
S3_BUCKET="${S3_BUCKET}"
S3_ACCESS_KEY="${S3_ACCESS_KEY}"
S3_SECRET_KEY="${S3_SECRET_KEY}"
S3_REGION="${S3_REGION:-us-east-1}"
# Retention
RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-30}"
# State file for incremental backups
STATE_FILE="${BACKUP_DIR}/last_backup_state.txt"
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}
# Configure rclone for S3
configure_rclone() {
    mkdir -p ~/.config/rclone
    cat > ~/.config/rclone/rclone.conf <<EOF
[s3]
type = s3
provider = AWS
env_auth = false
access_key_id = ${S3_ACCESS_KEY}
secret_access_key = ${S3_SECRET_KEY}
region = ${S3_REGION}
endpoint = ${S3_ENDPOINT}
acl = private
EOF
}
# Backup database
backup_database() {
    log "Starting database backup..."
    DB_BACKUP_FILE="${BACKUP_DIR}/db_${TIMESTAMP}.sql.gz"
    mysqldump -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USER}" -p"${DB_PASSWORD}" \
        --single-transaction \
        --quick \
        --lock-tables=false \
        --databases "${DB_NAME}" | gzip > "${DB_BACKUP_FILE}"
    log "Database backup completed: ${DB_BACKUP_FILE}"
    # Upload to S3
    rclone copy "${DB_BACKUP_FILE}" "s3:${S3_BUCKET}/database/" --progress
    log "Database backup uploaded to S3"
    # Clean up local backup file after successful upload
    rm -f "${DB_BACKUP_FILE}"
}
# Backup config.json
backup_config() {
    log "Backing up config.json..."
    if [ -f "${APP_DATA_DIR}/config.json" ]; then
        CONFIG_BACKUP="${BACKUP_DIR}/config_${TIMESTAMP}.json"
        cp "${APP_DATA_DIR}/config.json" "${CONFIG_BACKUP}"
        # Upload to S3
        rclone copy "${CONFIG_BACKUP}" "s3:${S3_BUCKET}/config/" --progress
        log "Config backup uploaded to S3"
        rm -f "${CONFIG_BACKUP}"
    else
        log "Warning: config.json not found"
    fi
}
# Incremental backup for images using rclone sync with checksums
backup_images() {
    log "Starting incremental image backup..."
    # Backup images directory
    if [ -d "${APP_DATA_DIR}/images" ]; then
        log "Syncing images directory (incremental)..."
        # Using rclone sync with --checksum for efficient incremental backup
        # Only uploads new or modified files
        rclone sync "${APP_DATA_DIR}/images" "s3:${S3_BUCKET}/images" \
            --checksum \
            --transfers 8 \
            --checkers 16 \
            --fast-list \
            --progress \
            --log-file="${BACKUP_DIR}/images_sync_${TIMESTAMP}.log"
        log "Images backup completed"
    else
        log "Warning: images directory not found"
    fi
}
# Backup avatars
backup_avatars() {
    log "Starting avatar backup..."
    if [ -d "${APP_DATA_DIR}/avatar" ]; then
        log "Syncing avatar directory..."
        # Avatar directory is usually smaller, but still use incremental sync
        rclone sync "${APP_DATA_DIR}/avatar" "s3:${S3_BUCKET}/avatar" \
            --checksum \
            --transfers 4 \
            --progress \
            --log-file="${BACKUP_DIR}/avatar_sync_${TIMESTAMP}.log"
        log "Avatar backup completed"
    else
        log "Warning: avatar directory not found"
    fi
}
# Clean up old database backups from S3
cleanup_old_backups() {
    log "Cleaning up backups older than ${RETENTION_DAYS} days..."
    # Delete old database backups
    rclone delete "s3:${S3_BUCKET}/database" \
        --min-age "${RETENTION_DAYS}d" \
        --progress || true
    # Delete old config backups
    rclone delete "s3:${S3_BUCKET}/config" \
        --min-age "${RETENTION_DAYS}d" \
        --progress || true
    # Clean up local logs
    find "${BACKUP_DIR}" -name "*.log" -mtime +7 -delete || true
    log "Cleanup completed"
}
# Create backup state file
create_state_file() {
    echo "LAST_BACKUP_DATE=${BACKUP_DATE}" > "${STATE_FILE}"
    echo "LAST_BACKUP_TIMESTAMP=${TIMESTAMP}" >> "${STATE_FILE}"
}
# Main backup function
run_backup() {
    log "=========================================="
    log "Starting backup process..."
    log "=========================================="
    # Create backup directory
    mkdir -p "${BACKUP_DIR}"
    # Configure rclone
    configure_rclone
    # Test S3 connection
    log "Testing S3 connection..."
    if ! rclone lsd "s3:${S3_BUCKET}" > /dev/null 2>&1; then
        log "Error: Cannot connect to S3 bucket. Please check your credentials."
        exit 1
    fi
    # Perform backups
    backup_database
    backup_config
    backup_images
    backup_avatars
    # Cleanup old backups
    cleanup_old_backups
    # Update state file
    create_state_file
    log "=========================================="
    log "Backup process completed successfully!"
    log "=========================================="
}
# Run backup
run_backup
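
The script covers only the backup direction; a restore path is not part of this commit. A hedged sketch of pulling the newest dump back out of the bucket layout used above, run inside the backup container where the s3 remote is already configured (the local scratch path is an assumption). Because the dump is taken with --databases, it recreates and selects the database itself:

# Sketch only, not part of this commit
LATEST=$(rclone lsf "s3:${S3_BUCKET}/database/" | sort | tail -n 1)
rclone copy "s3:${S3_BUCKET}/database/${LATEST}" /backup/local/restore/
gunzip -c "/backup/local/restore/${LATEST}" | \
    mysql -h "${DB_HOST}" -P "${DB_PORT}" -u "${DB_USER}" -p"${DB_PASSWORD}"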

docker-compose.yml (changed)

@@ -10,15 +10,8 @@ services:
    depends_on:
      - db
      - redis
    environment:
      - DB_HOST=db
      - DB_PORT=3306
      - DB_USER=nysoure
      - DB_PASSWORD=nysoure_password
      - DB_NAME=nysoure
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - BANNED_REDIRECT_DOMAINS=example.com,example.org
    env_file:
      - .env
    restart: unless-stopped
    logging:
      driver: "json-file"
@@ -30,11 +23,8 @@ services:
    image: mariadb:latest
    volumes:
      - db_data:/var/lib/mysql
    environment:
      - MYSQL_ROOT_PASSWORD=root_password
      - MYSQL_DATABASE=nysoure
      - MYSQL_USER=nysoure
      - MYSQL_PASSWORD=nysoure_password
    env_file:
      - .env
    restart: unless-stopped
    logging:
      driver: "json-file"
@@ -51,6 +41,27 @@ services:
        max-size: "10m"
        max-file: "3"

  backup:
    build:
      context: .
      dockerfile: Dockerfile.backup
    volumes:
      - app_data:/backup/app_data:ro
      - db_data:/backup/db_data:ro
      - backup_data:/backup/local
    depends_on:
      - db
      - app
    env_file:
      - .env
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

volumes:
  app_data:
  db_data:
  backup_data:
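
With the backup service wired into the compose file, the path can be checked end to end. A brief sketch, assuming the service names above; the s3 remote exists inside the container once backup.sh has run at least once:

docker compose up -d --build backup
docker compose logs -f backup
# After the first run the container has a configured "s3" remote:
docker compose exec backup sh -c 'rclone lsf "s3:${S3_BUCKET}/database/"'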