initial
This commit is contained in:
commit
69b14699af
6
.gitignore
vendored
Normal file
6
.gitignore
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
# Runtime artifacts created by backup_icloud_photos.sh / the container
/cookies/*
/logs/*
/temp/*
# Superseded implementation kept locally, not tracked
/old_backup_photos_functions.py

# Local secrets loaded by backup_icloud_photos.sh (iCloud credentials,
# Immich API key) — must never be committed
.env
|
||||||
35
Dockerfile
Normal file
35
Dockerfile
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Use official Python image as base
FROM python:3.11-slim

# Install necessary packages
# - git: required by pip to install the pyicloud commit pinned in
#   requirements.txt (git+https URL)
# - libmediainfo0v5: media-info shared library (presumably needed by a
#   runtime dependency — TODO confirm)
# - build-essential: compiler toolchain for source-built Python packages
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    git \
    libmediainfo0v5 \
    build-essential && \
    rm -rf /var/lib/apt/lists/*

# Set environment variables
# Unbuffered stdout so log lines appear immediately in `docker logs`
ENV PYTHONUNBUFFERED=1

# Create app directory
WORKDIR /app

# Install Python dependencies
COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt

# Copy the backup script
COPY backup_photos.py /app/

# Create directory for pyicloud cookies
RUN mkdir -p /app/.pyicloud

# Ensure log directory exists
RUN mkdir -p /var/log

# Set volumes
# NOTE(review): backup_icloud_photos.sh mounts /backup/temp, not
# /backup/photos — confirm which path is intended here.
VOLUME ["/backup/photos", "/mnt/photos_backup", "/app/.pyicloud", "/var/log"]

# Set entrypoint
ENTRYPOINT ["python", "backup_photos.py"]
|
||||||
10
README.md
Normal file
10
README.md
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
The `backup_icloud_photos.sh` script downloads photos from iCloud and stores them temporarily on this server before copying them to the NAS. The backup script makes use of a library called pyicloud. As of ~October 2024, the package has a bug that prevents it from properly authenticating with iCloud. The referenced commit in requirements.txt, along with the pinned version of `srp`, gets around this issue. The pyicloud library itself has not been updated in a while.
|
||||||
|
|
||||||
|
The Immich instance, run from the sister directory `docker`, reads the directory on the NAS containing these photos as a read-only, external library. Immich can be started by running `docker compose up -d` from the sister `docker` directory.
|
||||||
|
|
||||||
|
Immich typically needs to "Scan" the external library to show updates or newly added photos.
|
||||||
|
|
||||||
|
Immich can be updated by running: `docker compose pull && docker compose up -d`. HOWEVER, **Immich recommends reading the release notes** because there can be breaking changes. Immich does not prioritize backward compatibility in their updates.
|
||||||
|
|
||||||
|
[Immich docs](https://immich.app/docs/overview/introduction)
|
||||||
|
[Immich release notes](https://github.com/immich-app/immich/releases)
|
||||||
43
backup_icloud_photos.sh
Executable file
43
backup_icloud_photos.sh
Executable file
@ -0,0 +1,43 @@
|
|||||||
|
#!/bin/bash

# backup_icloud_photos.sh
#
# Builds the icloud-backup Docker image and runs it once, mounting the
# temp/cookie/log directories and the NAS photo share into the container.

# Exit immediately if a command exits with a non-zero status
set -e

# SCRIPT_DIR="$(dirname "$(realpath "$0")")"
SCRIPT_DIR="/home/john/mine/scripts/icloud_backup"

# Load environment variables from .env file.
# `set -a` auto-exports every variable the sourced file assigns. This is
# safer than the previous `export $(grep -v '^#' .env | xargs)`, which
# word-splits and breaks on values containing spaces, quotes, or `=`.
if [ -f "$SCRIPT_DIR/.env" ]; then
    set -a
    # shellcheck disable=SC1091
    . "$SCRIPT_DIR/.env"
    set +a
else
    echo ".env file not found in $SCRIPT_DIR/.env!"
    exit 1
fi

# Define variables for paths
IMAGE_NAME="icloud-backup"
CONTAINER_NAME="icloud-backup-container"

# Build the Docker image
echo "Building Docker image: $IMAGE_NAME"
docker build -f "$SCRIPT_DIR/Dockerfile" -t "$IMAGE_NAME" "$SCRIPT_DIR"

# Run the Docker container (quoted expansions so odd names can't word-split)
echo "Running Docker container: $CONTAINER_NAME"

docker run --rm -it \
    --name "$CONTAINER_NAME" \
    --network immich_network \
    -e "ICLOUD_USERNAME=$ICLOUD_USERNAME" \
    -e "ICLOUD_PASSWORD=$ICLOUD_PASSWORD" \
    -e "IMMICH_API_KEY=$IMMICH_API_KEY" \
    -e "NAS_IMMICH_LIBRARY_ID=$NAS_IMMICH_LIBRARY_ID" \
    -v "$SCRIPT_DIR/temp:/backup/temp" \
    -v /mnt/photos_backup:/mnt/photos_backup \
    -v "$SCRIPT_DIR/cookies:/app/.pyicloud" \
    -v "$SCRIPT_DIR/logs:/var/log" \
    "$IMAGE_NAME"

echo "Backup process completed successfully."
|
||||||
302
backup_photos.py
Normal file
302
backup_photos.py
Normal file
@ -0,0 +1,302 @@
|
|||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import mimetypes
|
||||||
|
import os
|
||||||
|
import piexif
|
||||||
|
import re
|
||||||
|
import requests
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from datetime import datetime
|
||||||
|
from pyicloud import PyiCloudService
|
||||||
|
from pyicloud.exceptions import PyiCloudFailedLoginException, PyiCloudAPIResponseException
|
||||||
|
|
||||||
|
|
||||||
|
# When False, photos that already exist on the NAS are skipped instead of
# re-downloaded (checked in handle_media_download).
OVERWRITE = False
IMMICH_API_KEY = os.getenv("IMMICH_API_KEY")
NAS_IMMICH_LIBRARY_ID = os.getenv("NAS_IMMICH_LIBRARY_ID")

# Timestamp the log filename per run so successive backups don't clobber
# each other's logs.
program_start_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
log_file = f"/var/log/icloud_backup_{program_start_time}.log"

# Log to both the per-run file and stdout (stdout is what `docker logs`
# shows when running inside the container).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(log_file),
        logging.StreamHandler(sys.stdout)
    ]
)
|
||||||
|
|
||||||
|
def authenticate_icloud(username, password, cookie_dir):
    """Log in to iCloud and complete any interactive 2FA/2SA challenge.

    Exits the process (status 1) on a failed login or an invalid
    verification code; otherwise returns the authenticated
    PyiCloudService instance.
    """
    try:
        api = PyiCloudService(username, password, cookie_directory=cookie_dir)
    except PyiCloudFailedLoginException as err:
        logging.error("Failed to authenticate to iCloud: %s", err)
        sys.exit(1)

    # Both challenges are answered interactively on stdin, so the first
    # run must be attached to a terminal (docker run -it).
    if api.requires_2fa:
        logging.info("Two factor auth required.")
        if not api.validate_2fa_code(input("Enter the 2fa code: ")):
            logging.error("Invalid 2fa code.")
            sys.exit(1)
    if api.requires_2sa:
        logging.info("Two step authentication enabled")
        if not api.validate_2sa_code(input("Enter the security code: ")):
            logging.error("Invalid security code.")
            sys.exit(1)

    return api
|
||||||
|
|
||||||
|
def refresh_icloud_session(api):
    """Refresh the iCloud session, re-raising on failure.

    Logs the failure (with traceback) and re-raises whatever exception
    `api.refresh()` threw; the caller decides whether to retry or abort.
    """
    try:
        api.refresh()
    except Exception:
        # logging.exception records the traceback; the bare `raise`
        # preserves it for the caller (the original `raise e` rewrote
        # the traceback to start here).
        logging.exception("Failed to refresh iCloud session")
        raise
    logging.info("iCloud session refreshed successfully")
|
||||||
|
|
||||||
|
def ensure_unique_filename(photo, nas_dir):
    """
    Ensures the photo on the NAS has a unique filename based on the photo's ID.
    If a file with the same name already exists, it is renamed.
    Returns the updated file path (using the ID).
    """
    # Original file storage pattern: plain iCloud filename
    original_name = photo.filename
    base_name, ext = os.path.splitext(original_name)
    nas_photo_path = os.path.join(nas_dir, original_name)

    # New file storage pattern: embed the (sanitized) photo ID so two
    # different photos that share a filename cannot collide on the NAS.
    sanitized_id = re.sub(r'[^a-zA-Z0-9_-]', '', str(photo.id))
    name_with_id = f"{base_name}_{sanitized_id}{ext}"
    nas_photo_path_with_id = os.path.join(nas_dir, name_with_id)

    if os.path.exists(nas_photo_path):
        # Migrate a legacy file (stored under the plain name) to the new
        # ID-suffixed name.
        try:
            os.rename(nas_photo_path, nas_photo_path_with_id)
            logging.info(f"Renamed existing file {nas_photo_path} to {nas_photo_path_with_id}")
        except Exception as e:
            # Fix: include the actual error — the original message
            # silently dropped the exception detail.
            logging.error(f"Failed to rename file {nas_photo_path}: {e}")

    return nas_photo_path_with_id
|
||||||
|
|
||||||
|
def handle_media_download(photo, temp_dir, nas_dir, album_names):
    """Download one photo to temp storage, move it to the NAS, and link it
    into each of its album directories.

    Returns True when a new file was written to the NAS, False when the
    photo was skipped (already present and OVERWRITE is False).
    Re-raises any download/copy failure after logging it.
    """
    photo_name = photo.filename
    temp_path = os.path.join(temp_dir, photo_name)

    nas_photo_path = ensure_unique_filename(photo, nas_dir)

    added_photo = False

    if not OVERWRITE and os.path.exists(nas_photo_path):
        logging.debug("Photo %s already exists on NAS. Skipping", photo_name)
        return added_photo
    else:
        try:
            download_media(photo, temp_path)

            handle_live_photo(photo, temp_dir, nas_dir)

            copy_media(photo, temp_path, nas_photo_path, temp_path)
            logging.info(f"Successfully saved photo {photo.id} to NAS at {nas_photo_path}\n")
            added_photo = True
        except Exception as e:
            logging.error("Failed to process photo %s: %s", photo_name, e)
            # Bare raise preserves the original traceback (the previous
            # `raise e` rewrote it to start here).
            raise

    # Create symbolic links in each album directory so one file on disk
    # appears in every album it belongs to.
    for album_name in album_names:
        album_dir = os.path.join(nas_dir, album_name)
        os.makedirs(album_dir, exist_ok=True)
        album_photo_path = os.path.join(album_dir, photo_name)
        if not os.path.exists(album_photo_path):
            try:
                os.symlink(nas_photo_path, album_photo_path)
                logging.debug("Created symlink for photo %s in album %s", photo_name, album_name)
            except FileExistsError:
                logging.warning("Symlink for photo %s in album %s already exists", photo_name, album_name)
            except Exception as e:
                logging.error("Failed to create symlink for photo %s in album %s: %s", photo_name, album_name, e)
    return added_photo
|
||||||
|
|
||||||
|
def download_media(media, temp_path, *download_args):
    """Stream one media item from iCloud into temp_path, then make sure
    its Exif datetime metadata is present.

    Extra positional args (e.g. "medium" for a Live Photo's video part)
    are forwarded to media.download().
    """
    logging.debug("Downloading media: %s", media.filename)
    response = media.download(*download_args)
    with open(temp_path, 'wb') as out:
        # Stream in 1 KiB chunks; skip keep-alive chunks that are empty.
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                out.write(chunk)
    logging.debug("Downloaded media: %s", media.filename)

    ensure_metadata(media, temp_path)
|
||||||
|
|
||||||
|
def ensure_metadata(media, file_path):
    """Backfill missing Exif datetime tags from the iCloud creation time.

    Only JPEG/TIFF files are touched; everything else is skipped. All
    failures are logged and swallowed — metadata is best-effort.
    """
    try:
        mime_type, _ = mimetypes.guess_type(file_path)
        if mime_type not in ["image/jpeg", "image/tiff"]:
            logging.debug(f"Skipping metadata check for unsupported file type {mime_type} ({file_path})")
            return

        exif_dict = piexif.load(file_path)

        datetime_original = exif_dict.get("Exif", {}).get(piexif.ExifIFD.DateTimeOriginal)

        if datetime_original:
            logging.debug(f"File already has datetime metadata {file_path}")
            return

        logging.debug(f"Missing datetime metadata for file {file_path}")

        # Exif datetimes use ':' separators in the date part.
        datetime_value = media.created.strftime("%Y:%m:%d %H:%M:%S")

        # Stamp all three conventional datetime tags with the same value.
        exif_dict["Exif"][piexif.ExifIFD.DateTimeOriginal] = datetime_value
        exif_dict["Exif"][piexif.ExifIFD.DateTimeDigitized] = datetime_value
        exif_dict["0th"][piexif.ImageIFD.DateTime] = datetime_value

        piexif.insert(piexif.dump(exif_dict), file_path)
        logging.debug(f"Updated Exif metadata for file {file_path}\nDatetime: {datetime_value}")
    except Exception as e:
        logging.error(f"Error ensuring metadata for file {file_path}: {e}")
|
||||||
|
|
||||||
|
def copy_media(media, source_path, destination_path, temp_path):
    """Place the downloaded file onto the NAS, then delete the temp copy."""
    name = media.filename
    logging.debug("Copying media %s to NAS", name)
    # copy2 also carries over file metadata (mtime etc.), unlike plain copy
    shutil.copy2(source_path, destination_path)
    logging.debug("Copied media %s to NAS", name)

    os.remove(temp_path)
    logging.debug("Removed temp file: %s", temp_path)
|
||||||
|
|
||||||
|
def handle_live_photo(photo, temp_dir, nas_dir):
    """If this asset is a Live Photo, also download its .mov video
    component and copy it to the NAS (skipped when already present)."""
    base_name, ext = os.path.splitext(photo.filename)

    logging.debug(f"Checking for live photo: {json.dumps(photo._versions, indent=4)}")
    versions = photo._versions
    if 'medium' not in versions or versions['medium']['type'] != 'com.apple.quicktime-movie':
        return

    logging.debug("This is a live photo")
    video_name = f"{base_name}.mov"
    temp_video_path = os.path.join(temp_dir, video_name)
    nas_video_path = os.path.join(nas_dir, video_name)

    if os.path.exists(nas_video_path):
        return

    logging.debug("Downloading Live Photo video component: %s", video_name)
    download_media(photo, temp_video_path, "medium")

    copy_media(photo, temp_video_path, nas_video_path, temp_video_path)
|
||||||
|
|
||||||
|
def get_all_media(api, temp_dir, nas_dir):
    """Walk every iCloud album, download each unique photo once, and link
    it into all of its album directories.

    After every ~100 newly added photos an Immich library scan is
    triggered, followed by a long sleep so Immich can ingest the batch.
    Returns True once the full photo map has been processed.
    """
    all_albums = api.photos.albums
    photo_album_map = {}  # photo -> list of album names it appears in
    unique_albums = set()

    for album in all_albums.values():
        album_name = album.title
        unique_albums.add(album_name)
        logging.info("Categorizing photos in album: %s", album_name)

        for photo in album.photos:
            # A photo can appear in several albums; collect them all so a
            # single download can be symlinked everywhere.
            photo_album_map.setdefault(photo, []).append(album_name)

    logging.info("Found %d unique photos in iCloud", len(photo_album_map))
    # Keep the set intact; format a separate string for the log (the
    # original rebound `unique_albums` to a str, shadowing the set).
    album_listing = '\n'.join(unique_albums)
    logging.info(f"All album names: {album_listing}")

    # Download each photo and handle its album associations
    i = 0  # newly-added photos since the last Immich scan
    for photo, album_names in photo_album_map.items():
        if i > 100:
            scan_library(NAS_IMMICH_LIBRARY_ID)
            logging.info("Sleeping while Immich processes new photos")
            time.sleep(600)
            logging.info("Resuming...")
            i = 0

        added_photo = handle_media_download(photo, temp_dir, nas_dir, album_names)
        if added_photo:
            i += 1

    return True
|
||||||
|
|
||||||
|
def get_immich_libraries():
    """Fetch and log the list of libraries from the Immich server.

    Returns the decoded JSON response so callers can actually use it
    (previously the result was only logged and discarded).
    """
    url = "http://immich_server:2283/api/libraries"
    headers = {
        "Accept": "application/json",
        "x-api-key": IMMICH_API_KEY
    }

    # Fix: a GET request should not carry a request body — the original
    # passed an (empty) `data` payload.
    response = requests.get(url, headers=headers)
    libraries = response.json()
    logging.info(f"Libraries: {libraries}")
    return libraries
|
||||||
|
|
||||||
|
def scan_library(library_id):
    """Ask Immich to scan the external library identified by library_id.

    Returns True when the scan was triggered, False when the request
    failed. Errors are logged, never raised, so a failed scan cannot
    abort the backup loop.
    """
    try:
        url = f"http://immich_server:2283/api/libraries/{library_id}/scan"
        payload = {}
        headers = {
            "Accept": "application/json",
            "x-api-key": IMMICH_API_KEY
        }

        logging.info("Triggering library scan...")
        response = requests.post(url, headers=headers, data=payload)
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        logging.error(f"Attempt to trigger Immich library scan failed due to HTTP error: {e}")
        # Fix: the original fell through to the success log and returned
        # True even after the request failed.
        return False
    except Exception as e:
        logging.error(f"Attempt to trigger Immich library scan failed: {e}")
        return False

    logging.info("Successfully started library scan")
    return True
|
||||||
|
|
||||||
|
def main():
    """Entry point: validate configuration, then run the backup in a loop
    until it completes without error, retrying on transient iCloud
    failures."""
    icloud_username = os.getenv("ICLOUD_USERNAME")
    icloud_password = os.getenv("ICLOUD_PASSWORD")
    TEMP_DIR = os.getenv("TEMP_DIR", "/backup/temp")
    NAS_DIR = os.getenv("NAS_DIR", "/mnt/photos_backup/icloud")
    COOKIE_DIR = os.getenv("COOKIE_DIR", "/app/.pyicloud")

    if not NAS_IMMICH_LIBRARY_ID:
        logging.error("NAS_IMMICH_LIBRARY_ID must be set. Exiting...")
        sys.exit(1)

    if not IMMICH_API_KEY:
        logging.error("IMMICH_API_KEY must be set. Exiting...")
        sys.exit(1)

    if not icloud_username or not icloud_password:
        logging.error("icloud credentials not set. Please set ICLOUD_USERNAME and ICLOUD_PASSWORD environment variables.")
        sys.exit(1)

    # Create necessary directories if they don't exist
    os.makedirs(TEMP_DIR, exist_ok=True)
    os.makedirs(NAS_DIR, exist_ok=True)

    completed = False

    while not completed:
        try:
            api = authenticate_icloud(icloud_username, icloud_password, COOKIE_DIR)
            completed = get_all_media(api, TEMP_DIR, NAS_DIR)
        except PyiCloudAPIResponseException as e:
            # HTTP 410 ("gone") is treated as a transient session expiry:
            # re-authenticate on the next loop iteration and resume.
            if e.code == 410:
                logging.warning("Detected a gone error, restarting...")
            else:
                logging.error("Detected another HTTP error: %s", e)
            time.sleep(60)
        except Exception as e:
            logging.error("Encountered an error: %s", e)
            # Bare raise preserves the original traceback (the previous
            # `raise e` rewrote it to start here).
            raise
    logging.info("Completed backup without erroring")
|
||||||
|
|
||||||
|
|
||||||
|
# Run the backup only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||||
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
# pyicloud==1.0.0
# Pinned pyicloud commit (together with the srp pin below) works around
# an iCloud authentication bug in the released package — see README.
git+https://github.com/picklepete/pyicloud.git@41be37dc373201157aa497d0baf8b8c35c119fa8#pyicloud
srp==1.0.22

# Exif read/write — used by ensure_metadata in backup_photos.py
piexif
|
||||||
Loading…
x
Reference in New Issue
Block a user