Skip to content

Instantly share code, notes, and snippets.

@salessandri
Created February 24, 2025 04:08
Show Gist options
  • Select an option

  • Save salessandri/ef57644109df14e8c0692cac2bac52ae to your computer and use it in GitHub Desktop.

Select an option

Save salessandri/ef57644109df14e8c0692cac2bac52ae to your computer and use it in GitHub Desktop.
Script tailored to back up to S3 the channels.backup file generated by lnd, each time it changes
#!/usr/bin/env python3
"""
LND Channels Backup Service
==========================
A service specifically designed to backup the channels.backup file generated by LND (Lightning
Network Daemon). The service is tailored to LND's update mechanism, where it creates a new
backup file and then moves it to replace the existing channels.backup, ensuring atomic updates.
Features
--------
- Monitors LND's channels.backup file for atomic replacements
- Creates timestamped backups when LND generates a new backup
- Uploads backups to S3 with SHA256 hash suffix
- Handles graceful shutdown on SIGTERM/SIGINT
- Performs initial backup on startup
Configuration
------------
The service is configured through environment variables:
MONITOR_FILE_PATH : str
Full path to LND's channels.backup file
BACKUP_DIRECTORY : str
Path to directory for temporary backup storage
S3_BUCKET_NAME : str
Name of the S3 bucket for storing backups
S3_BACKUP_PREFIX : str
Prefix to use for S3 object keys (e.g., "backups/channels")
Backup Process
-------------
1. When LND generates a new backup:
- Creates a local backup with timestamp (channels.backup_YYYYMMDD_HHMMSS)
- Calculates SHA256 hash of the backup
- Uploads to S3 with key: {prefix}/{backup_name}_{hash[-6:]}
- Removes local backup after successful upload
Dependencies
-----------
- boto3: AWS SDK for S3 operations
- asyncinotify: Async inotify monitoring
- python-dotenv: Environment variable management
Example
-------
To run the service:
1. Set up environment variables (can use .env file)
2. Ensure AWS credentials are configured
3. Run: python backup-channels-file.py
The service will create an initial backup and then monitor for changes until stopped
with SIGTERM or SIGINT.
"""
import asyncio
import hashlib
import os
import signal
import sys
import shutil
import logging
from datetime import datetime
import boto3
from botocore.exceptions import ClientError
from asyncinotify import Inotify, Mask
from dotenv import load_dotenv
def setup_logging():
"""Configure logging to output to stdout."""
log_format = "%(asctime)s - %(levelname)s - %(message)s"
logging.basicConfig(level=logging.INFO, format=log_format, stream=sys.stdout)
return logging.getLogger(__name__)
logger = setup_logging()
class ChannelsBackup:
    """Backup service for LND's channels.backup file.

    Watches the directory containing the monitored file for MOVED_TO
    events (LND atomically replaces channels.backup via rename), then
    copies the file locally with a timestamp, uploads the copy to S3
    under a SHA256-suffixed key, and removes the local copy on success.
    """

    # Chunk size used when hashing, so the backup file never has to be
    # held in memory in one piece.
    _HASH_CHUNK_SIZE = 65536

    def __init__(self, file_path, backup_dir, bucket_name, s3_prefix):
        """Store configuration and create the S3 client and inotify handle.

        Parameters
        ----------
        file_path : str
            Path to the file to monitor (made absolute here).
        backup_dir : str
            Directory where timestamped local copies are created.
        bucket_name : str
            Target S3 bucket name.
        s3_prefix : str
            Key prefix for uploaded objects; surrounding slashes stripped.
        """
        self.file_path = os.path.abspath(file_path)
        self.backup_dir = backup_dir
        self.bucket_name = bucket_name
        self.s3_prefix = s3_prefix.strip("/")  # Remove trailing/leading slashes
        self.s3_client = boto3.client("s3")
        self.running = False
        self.inotify = Inotify()
        self.stop_event = asyncio.Event()

    def _sha256_of(self, path):
        """Return the hex SHA256 digest of the file at *path*, read in chunks."""
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(self._HASH_CHUNK_SIZE), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def create_backup(self):
        """Create a timestamped local copy, upload it to S3, then delete it.

        Errors are logged, never raised: a failed backup must not kill the
        monitor loop. On upload failure the local copy is intentionally
        kept so the data is not lost.
        """
        try:
            # Timestamped name: channels.backup_YYYYMMDD_HHMMSS
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            file_name = os.path.basename(self.file_path)
            backup_file = f"{file_name}_{timestamp}"
            backup_path = os.path.join(self.backup_dir, backup_file)

            # copy2 also preserves file metadata (e.g. mtime).
            shutil.copy2(self.file_path, backup_path)
            logger.info(f"Created backup: {backup_path}")

            sha256_hash = self._sha256_of(backup_path)

            # The hash suffix disambiguates two backups created within
            # the same second.
            s3_key = f"{self.s3_prefix}/{backup_file}_{sha256_hash[-6:]}"
            self.s3_client.upload_file(backup_path, self.bucket_name, s3_key)
            logger.info(f"Uploaded to S3: s3://{self.bucket_name}/{s3_key}")

            # Only remove the local copy once the upload succeeded.
            os.remove(backup_path)
            logger.info(f"Deleted local backup: {backup_path}")
        except ClientError as e:
            logger.error(f"Error uploading to S3: {e}")
        except Exception as e:
            logger.error(f"Error creating backup: {e}", exc_info=True)

    async def monitor(self):
        """Run the watch loop: initial backup, then one backup per replacement.

        Returns when stop() is called (or on a fatal error, which is
        logged). The inotify handle is always closed on exit.
        """
        event_task = None
        try:
            # Take a baseline backup before waiting for changes.
            logger.info("Creating initial backup...")
            self.create_backup()
            self.running = True
            logger.info(f"Started monitoring {self.file_path}")
            logger.info(f"Backups will be stored in {self.backup_dir}")
            logger.info(
                f"Files will be uploaded to s3://{self.bucket_name}/{self.s3_prefix}/"
            )

            watch_dir = os.path.dirname(self.file_path)
            filename = os.path.basename(self.file_path)

            # LND replaces channels.backup via rename, which appears as a
            # MOVED_TO event in the containing directory.
            self.inotify.add_watch(watch_dir, Mask.MOVED_TO)

            stop_task = asyncio.create_task(self.stop_event.wait())
            while self.running:
                event_task = asyncio.create_task(self.inotify.get())
                done, _ = await asyncio.wait(
                    [event_task, stop_task], return_when=asyncio.FIRST_COMPLETED
                )
                # BUGFIX: test membership instead of done.pop() — when both
                # tasks finish in the same iteration, pop() may return the
                # event task and the shutdown request would be missed.
                if stop_task in done:
                    break
                event = event_task.result()
                event_task = None
                # The watch covers the whole directory; react only to our
                # file (event.name can be None for some event types).
                if event.name is not None and event.name.name == filename:
                    logger.info(f"Detected file change: {event}")
                    self.create_backup()
        except Exception as e:
            logger.error(f"Error in monitor loop: {e}", exc_info=True)
        finally:
            # BUGFIX: cancel a still-pending inotify read so asyncio does
            # not warn about a destroyed pending task on shutdown.
            if event_task is not None and not event_task.done():
                event_task.cancel()
            self.inotify.close()
            logger.info("Stopped monitoring")

    def stop(self):
        """Request a graceful shutdown of the monitor loop.

        Safe to invoke from a signal handler registered on the event loop.
        """
        logger.info("Stopping monitoring...")
        self.running = False
        self.stop_event.set()
def _load_config():
    """Read the required settings from the environment.

    Returns a (file_path, backup_dir, bucket_name, s3_prefix) tuple, or
    None if any variable is missing or empty (the first missing variable
    is logged at CRITICAL level).
    """
    required = (
        "MONITOR_FILE_PATH",
        "BACKUP_DIRECTORY",
        "S3_BUCKET_NAME",
        "S3_BACKUP_PREFIX",
    )
    values = []
    for var in required:
        value = os.getenv(var)
        if not value:
            logger.critical(f"Environment variable {var} is not set")
            return None
        values.append(value)
    return tuple(values)


async def main():
    """Main entry point for the backup service.

    Returns
    -------
    int
        Process exit code: 0 on clean shutdown, 1 on configuration or
        fatal runtime error.
    """
    try:
        # Allow configuration via a .env file as well as the environment.
        load_dotenv()

        config = _load_config()
        if config is None:
            return 1
        file_path, backup_dir, bucket_name, s3_prefix = config

        # Verify that the monitored file exists
        if not os.path.exists(file_path):
            logger.critical(f"File {file_path} does not exist")
            return 1
        # isdir (not exists) so a plain file at this path is rejected too.
        if not os.path.isdir(backup_dir):
            logger.critical(f"Directory {backup_dir} does not exist")
            return 1

        backup_service = ChannelsBackup(file_path, backup_dir, bucket_name, s3_prefix)

        # Translate SIGTERM/SIGINT into a graceful stop of the monitor loop.
        loop = asyncio.get_running_loop()
        for sig in (signal.SIGTERM, signal.SIGINT):
            loop.add_signal_handler(sig, backup_service.stop)

        await backup_service.monitor()
        return 0
    except Exception:
        logger.error("Fatal error in main loop", exc_info=True)
        return 1
if __name__ == "__main__":
    # Run the async service and propagate its integer return value as
    # the process exit status.
    sys.exit(asyncio.run(main()))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment