Compare commits
4 commits
afc30a195d
...
2899cd50c8
| Author | SHA1 | Date | |
|---|---|---|---|
| 2899cd50c8 | |||
| b62649cae0 | |||
| 60c2c42a49 | |||
| fcd92db125 |
7 changed files with 527 additions and 126 deletions
216
bin/timestamp_icloud_photos_for_nextcloud
Normal file
216
bin/timestamp_icloud_photos_for_nextcloud
Normal file
|
|
@ -0,0 +1,216 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
from subprocess import check_output, CalledProcessError
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from pathlib import Path
|
||||||
|
import json
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
from os import cpu_count
|
||||||
|
from time import sleep
|
||||||
|
|
||||||
|
# File extension groups by metadata container type.
# NOTE(review): EXT_GROUPS is not referenced anywhere in this script —
# confirm whether it is still needed or is leftover from an earlier revision.
EXT_GROUPS = {
    "quicktime": {".mp4", ".mov", ".heic", ".cr3"},
    "exif": {".jpg", ".jpeg", ".cr2"},
}

# (group, tag) pairs probed for a creation timestamp, in order of preference;
# the first pair that parses successfully wins (see exiftool_timestamp).
DATETIME_KEYS = [
    ("Composite", "SubSecDateTimeOriginal"),
    ("Composite", "SubSecCreateDate"),
    ('ExifIFD', 'DateTimeOriginal'),
    ('ExifIFD', 'CreateDate'),
    ('XMP-xmp', 'CreateDate'),
    ('Keys', 'CreationDate'),
    ('QuickTime', 'CreateDate'),
    ('XMP-photoshop', 'DateCreated'),
]
|
||||||
|
|
||||||
|
def run(command):
    """Execute *command* and return its stdout as stripped text.

    Raises CalledProcessError when the command exits non-zero.
    """
    output = check_output(command, text=True)
    return output.strip()
||||||
|
|
||||||
|
|
||||||
|
def mdls_timestamp(file):
    """Return the Spotlight (mdls) content-creation timestamp of *file*.

    Retries up to 5 times with a 1-second pause between attempts, since mdls
    can fail transiently. Raises RuntimeError when no attempt succeeds.
    """
    for attempt in range(5):
        if attempt:
            # BUG FIX: sleep(1) used to sit at the end of the loop body, after
            # both `continue` statements — every path either returned or
            # continued past it, so it never ran. Back off *before* a retry.
            sleep(1)

        try:
            output = run(('mdls', '-raw', '-name', 'kMDItemContentCreationDate', file))
        except CalledProcessError as e:
            print(f"{file}: Error running mdls (attempt {attempt+1}/5): {e}")
            continue

        try:
            return datetime.strptime(output, "%Y-%m-%d %H:%M:%S %z")
        except ValueError as e:
            print(f"{file}: Error parsing mdls output (attempt {attempt+1}/5): {e}")
            continue

    raise RuntimeError(f"Failed to get mdls timestamp for {file} after 5 attempts")
|
||||||
|
|
||||||
|
|
||||||
|
def exiftool_data(file):
    """Return the first JSON record of exiftool's time-tag dump for *file*,
    or None when exiftool exits non-zero."""
    command = (
        'exiftool',
        '-j',  # json
        '-a',  # unknown tags
        '-u',  # unknown values
        '-g1',  # group by category
        '-time:all',  # all time tags
        '-api', 'QuickTimeUTC=1',  # use UTC for QuickTime timestamps
        '-d', '%Y-%m-%dT%H:%M:%S%z',
        file,
    )
    try:
        output = run(command)
    except CalledProcessError as e:
        print(f"Error running exiftool: {e}")
        return None
    return json.loads(output)[0]
|
||||||
|
|
||||||
|
def exiftool_timestamp(file):
    """Return (group, tag, datetime) for the first DATETIME_KEYS entry that
    parses in *file*'s exiftool data, or (None, None, None) otherwise.

    TypeError is tolerated because exiftool_data may return None.
    """
    data = exiftool_data(file)
    for category, key in DATETIME_KEYS:
        try:
            value = data[category][key]
            return category, key, datetime.strptime(value, '%Y-%m-%dT%H:%M:%S%z')
        except (TypeError, KeyError, ValueError):
            # missing group/tag or unparseable value — try the next candidate
            # (unused `as e` binding removed)
            continue
    print(f"⚠️ {file}: No timestamp found in exiftool: " + json.dumps(data, indent=2))
    return None, None, None
|
||||||
|
|
||||||
|
|
||||||
|
def photo_has_embedded_timestamp(file):
    """Check whether *file*'s embedded (exiftool) timestamp agrees with the
    Spotlight (mdls) creation time within one hour."""
    mdls_ts = mdls_timestamp(file)
    category, key, exiftool_ts = exiftool_timestamp(file)

    if not exiftool_ts:
        print(f"⚠️ {file}: No timestamp found in exiftool")
        return False

    # normalize timezone for comparison
    exiftool_ts = exiftool_ts.astimezone(mdls_ts.tzinfo)
    delta = abs(mdls_ts - exiftool_ts)

    if delta >= timedelta(hours=1):
        print(f"⚠️ {file}: {mdls_ts.isoformat()} != {exiftool_ts} (Δ={delta})")
        return False

    # allow for small differences
    print(f"✅ {file}: {mdls_ts.isoformat()} (#{category}:{key})")
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def photos_without_embedded_timestamps(directory):
    """Yield files in *directory* whose embedded timestamp is missing/wrong.

    Files whose timestamp checks out are moved into ./ok instead of yielded.
    Checks run concurrently; the executor is torn down eagerly (cancelling
    pending futures) if any check raises.
    """
    # BUG FIX: cpu_count() can return None (TypeError on //2), and //2 can
    # yield 0 (ValueError in ThreadPoolExecutor) — clamp to at least 1 worker.
    executor = ThreadPoolExecutor(max_workers=max(1, (cpu_count() or 2) // 2))
    try:
        futures = {
            executor.submit(photo_has_embedded_timestamp, file): file
            for file in directory.iterdir()
            if file.is_file()
            if file.suffix.lower() not in {".aae"}  # iOS edit sidecars, no image data
            if not file.name.startswith('.')
        }

        for future in as_completed(futures):
            file = futures[future]
            has_ts = future.result()  # raises immediately on first failed future

            if has_ts:
                file.rename(file.parent / 'ok' / file.name)
            else:
                yield file

    except Exception:
        executor.shutdown(wait=False, cancel_futures=True)
        raise
    else:
        executor.shutdown(wait=True)
|
||||||
|
|
||||||
|
|
||||||
|
def exiftool_write(file, assignments):
    """Write each (group, tag, value) in *assignments* into *file* in place."""
    print(f"🔵 {file}: Writing -- {assignments}")
    tag_args = [f"-{group}:{tag}={value}" for group, tag, value in assignments]
    command = (
        "exiftool", "-overwrite_original",
        "-api", "QuickTimeUTC=1",
        *tag_args,
        str(file),
    )
    return run(command)
|
||||||
|
|
||||||
|
|
||||||
|
def add_missing_timestamp(file):
    """Embed the Spotlight (mdls) creation time into *file*'s metadata.

    Picks the tag set to write based on the container type, then re-checks
    the file and moves it into ./processed on success.
    """
    data = exiftool_data(file)
    mdls_ts = mdls_timestamp(file)

    # strftime("%z") yields "+HHMM"; exiftool offset tags want "+HH:MM"
    offset = mdls_ts.strftime("%z")
    offset = f"{offset[:3]}:{offset[3:]}" if len(offset) == 5 else offset

    exif_ts = mdls_ts.strftime("%Y:%m:%d %H:%M:%S")  # EXIF values carry no offset inline
    qt_ts = mdls_ts.strftime("%Y:%m:%d %H:%M:%S")
    qt_ts_tz = f"{qt_ts}{offset}"  # QuickTime/XMP values carry the offset inline
    ext = file.suffix.lower()

    try:
        if ext in {".heic"}:
            # HEIC can carry EXIF, QuickTime and XMP containers — write all three
            exiftool_write(file, [
                ("ExifIFD", "DateTimeOriginal", qt_ts),
                ("ExifIFD", "CreateDate", qt_ts),
                ("ExifIFD", "OffsetTime", offset),
                ("ExifIFD", "OffsetTimeOriginal", offset),
                ("ExifIFD", "OffsetTimeDigitized", offset),
                ("QuickTime", "CreateDate", qt_ts_tz),
                ("Keys", "CreationDate", qt_ts_tz),
                ("XMP-xmp", "CreateDate", qt_ts_tz),
            ])
        elif "QuickTime" in data or ext in {".mp4", ".mov", ".heic", ".cr3"}:
            exiftool_write(file, [
                ("QuickTime", "CreateDate", qt_ts_tz),
                ("Keys", "CreationDate", qt_ts_tz),
            ])
        elif "ExifIFD" in data or ext in {".jpg", ".jpeg", ".cr2", ".webp"}:
            exiftool_write(file, [
                ("ExifIFD", "DateTimeOriginal", exif_ts),
                ("ExifIFD", "CreateDate", exif_ts),
                ("IFD0", "ModifyDate", exif_ts),
                ("ExifIFD", "OffsetTime", offset),
                ("ExifIFD", "OffsetTimeOriginal", offset),
                ("ExifIFD", "OffsetTimeDigitized", offset),
            ])
        elif ext in {".png", ".gif", ".avif"}:
            # formats without EXIF/QuickTime containers get XMP tags only
            exiftool_write(file, [
                ("XMP-xmp", "CreateDate", qt_ts_tz),
                ("XMP-photoshop", "DateCreated", exif_ts),
            ])
        else:
            print(f"❌ {file}: unsupported type, skipped")
            return

        # verify the write round-trips before declaring success
        if photo_has_embedded_timestamp(file):
            print(f"✅ {file}: Timestamp successfully added: {mdls_ts.isoformat()}")
            file.rename(file.parent / 'processed' / file.name)
            return
        else:
            category, key, exiftool_ts = exiftool_timestamp(file)
            print(f"❌ {file}: Timestamp still wrong/missing after write '{category}:{key}:{exiftool_ts}': #{json.dumps(data, indent=4)}")
            return
    except CalledProcessError as e:
        print(f"❌ {file}: Failed to write timestamp: {e}")
        return
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    parser = ArgumentParser(description="Print timestamps of photos in the current directory.")
    # BUG FIX: -d was optional with no default, so omitting it crashed with
    # Path(None). Default to the current directory, matching the description.
    parser.add_argument("-d", "--directory", default=".", help="Directory to scan for photos")
    args = parser.parse_args()

    directory = Path(args.directory)
    (directory/'ok').mkdir(exist_ok=True)
    (directory/'processed').mkdir(exist_ok=True)

    _photos_without_embedded_timestamps = list(photos_without_embedded_timestamps(directory))
    print(f"{len(_photos_without_embedded_timestamps)} photos without embedded timestamps found.")
    print("Press Enter to add missing timestamps...")
    input()

    for file in _photos_without_embedded_timestamps:
        add_missing_timestamp(file)
|
||||||
|
|
@ -1,110 +1,209 @@
|
||||||
#!/bin/bash
|
#!/usr/bin/env python3
|
||||||
set -euo pipefail
|
import argparse
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
if [[ $# -ne 4 ]]; then
|
|
||||||
echo "Usage: $0 <nc_user> <source_subdir> <dest_subdir> <unsortable_subdir>" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
NC_USER="$1"
|
# Media extensions eligible for sorting (lower-case, with leading dot);
# compared against Path.suffix.lower() in iter_files.
ALLOWED_EXTS = {
    ".png", ".jpg", ".jpeg", ".heic", ".cr2", ".cr3", ".mp4", ".mov",
    ".webp", ".avif", ".gif",
}
|
||||||
|
|
||||||
mapfile -d '' -t FILES < <(
|
# (group, tag) pairs probed for a creation timestamp, in order of preference;
# the first pair that parses successfully wins (see exiftool_timestamp).
DATETIME_KEYS = [
    ("Composite", "SubSecDateTimeOriginal"),
    ("Composite", "SubSecCreateDate"),
    ("ExifIFD", "DateTimeOriginal"),
    ("ExifIFD", "CreateDate"),
    ("XMP-xmp", "CreateDate"),
    ("Keys", "CreationDate"),
    ("QuickTime", "CreateDate"),
    ("XMP-photoshop", "DateCreated"),
]
|
||||||
\) -print0
|
|
||||||
|
|
||||||
|
def run(command: list[str], check: bool = True) -> subprocess.CompletedProcess:
    """Run *command* capturing stdout/stderr as text; raise on failure when *check*."""
    return subprocess.run(
        command,
        text=True,
        capture_output=True,
        check=check,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def exiftool_data(file: Path) -> dict | None:
    """Return the first record of exiftool's JSON time-tag dump for *file*,
    or None when exiftool fails or its output is unusable."""
    import json  # local import; module-level imports do not include json

    result = run([
        "exiftool",
        "-j",          # JSON output
        "-a",          # allow duplicate tags
        "-u",          # include unknown tags
        "-g1",         # group by family-1 category
        "-time:all",   # only time-related tags
        "-api", "QuickTimeUTC=1",
        "-d", "%Y-%m-%dT%H:%M:%S%z",
        str(file),
    ], check=False)
    if result.returncode != 0:
        return None
    try:
        # was `__import__("json").loads(...)` — a plain import is clearer;
        # catch only decode failures instead of a blanket `except Exception`
        data = json.loads(result.stdout)
    except ValueError:  # json.JSONDecodeError is a ValueError subclass
        return None
    return data[0] if data else None
|
||||||
|
|
||||||
|
|
||||||
|
def exiftool_timestamp(file: Path) -> datetime | None:
    """Return the first parseable creation timestamp from DATETIME_KEYS, else None."""
    data = exiftool_data(file)
    if not data:
        return None

    for group, tag in DATETIME_KEYS:
        group_tags = data.get(group)
        if not isinstance(group_tags, dict):
            continue
        value = group_tags.get(tag)
        if value is None:
            continue
        try:
            return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S%z")
        except ValueError:
            # present but unparseable — fall through to the next candidate
            continue

    return None
|
||||||
|
|
||||||
|
|
||||||
|
def short_hash(file: Path) -> str:
    """Return a 6-character filename-safe base64 prefix of *file*'s SHA-256.

    Mirrors the shell predecessor (`sha256sum | base64 | head -c 6 | tr '/+' '_-'`).
    The previous 3-character prefix (~18 bits) made filename collisions likely
    across a large photo library; 6 characters (~36 bits) restores the old
    collision resistance.
    """
    h = hashlib.sha256()
    with file.open("rb") as fh:
        # stream in 1 MiB chunks so large videos aren't loaded into memory
        for chunk in iter(lambda: fh.read(1024 * 1024), b""):
            h.update(chunk)
    b64 = base64.b64encode(h.digest()).decode("ascii")
    # '/' and '+' are not filename-friendly — same mapping as tr '/+' '_-'
    return b64[:6].replace("/", "_").replace("+", "-")
|
||||||
|
|
||||||
|
|
||||||
|
def build_destination(dest_root: Path, file: Path, ts: datetime) -> Path:
    """Build the sorted target path:
    <dest_root>/<YYYY-MM>[/raw]/<YYYYMMDD-HHMMSS_hash>.<ext>
    """
    ext = file.suffix.lower().lstrip(".")
    month_part = ts.strftime("%Y-%m")
    stamp_part = ts.strftime("%Y%m%d-%H%M%S")

    target_dir = dest_root / month_part
    if ext in {"cr2", "cr3"}:
        # RAW files live in a "raw" subfolder next to their month's images
        target_dir = target_dir / "raw"

    return target_dir / f"{stamp_part}_{short_hash(file)}.{ext}"
|
||||||
|
|
||||||
|
|
||||||
|
def move_unsortable(file: Path, source_root: Path, unsortable_root: Path) -> None:
    """Move *file* into the unsortable tree, preserving its path relative to
    *source_root*; directories and files are chowned to www-data for Nextcloud.

    Silently leaves the source file in place when a same-named target already
    exists (mirrors `mv -n` in the shell predecessor).
    """
    relpath = file.relative_to(source_root)
    target_dir = (unsortable_root / relpath).parent
    target_dir.mkdir(parents=True, exist_ok=True)
    shutil.chown(str(target_dir), user="www-data", group="www-data")
    target = target_dir / file.name
    if target.exists():
        # don't overwrite; keep the original for manual inspection
        return
    shutil.move(str(file), str(target))
    shutil.chown(str(target), user="www-data", group="www-data")
|
||||||
|
|
||||||
|
|
||||||
|
def move_sorted(file: Path, target: Path) -> None:
    """Move *file* to *target*, creating parent dirs and chowning to www-data."""
    parent = target.parent
    parent.mkdir(parents=True, exist_ok=True)
    shutil.chown(str(parent), user="www-data", group="www-data")
    shutil.move(str(file), str(target))
    shutil.chown(str(target), user="www-data", group="www-data")
|
||||||
|
|
||||||
|
def process_file(file: Path, source_root: Path, dest_root: Path, unsortable_root: Path) -> tuple[Path, str]:
    """Sort one media file by its embedded timestamp.

    Returns (file, "sorted") when moved into the destination tree, or
    (file, "unsortable") when no timestamp could be extracted.
    """
    print(f"PROCESSING: {file}")
    timestamp = exiftool_timestamp(file)

    if timestamp is not None:
        destination = build_destination(dest_root, file, timestamp)
        print(f"DESTINATION: {destination}")
        move_sorted(file, destination)
        return file, "sorted"

    print(f"UNSORTABLE: {file}")
    move_unsortable(file, source_root, unsortable_root)
    return file, "unsortable"
|
||||||
|
|
||||||
|
|
||||||
|
def scan_nextcloud(rel_source: str, rel_unsortable: str, rel_dest: str) -> None:
    """Re-index the three Nextcloud folders and kick off preview generation.

    Runs `occ files:scan` as www-data for each path, then starts the systemd
    unit that pre-generates previews for the newly indexed files.
    """
    print("SCANNING...")
    # (commented-out chown/chmod dead code removed; the main block already
    # fixes permissions on the source tree before processing)
    occ_scan = ["sudo", "-u", "www-data", "php", "/opt/nextcloud/occ", "files:scan", "--path"]
    for rel_path in (rel_source, rel_unsortable, rel_dest):
        run([*occ_scan, rel_path], check=True)

    run(["systemctl", "start", "nextcloud-generate-new-previews.service"], check=True)
|
||||||
|
|
||||||
|
|
||||||
|
def iter_files(source_root: Path):
    """Yield every regular file under *source_root* with an allowed media extension."""
    for candidate in source_root.rglob("*"):
        if not candidate.is_file():
            continue
        if candidate.suffix.lower() not in ALLOWED_EXTS:
            continue
        yield candidate
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Sort Nextcloud media files by embedded timestamp."
    )
    parser.add_argument("nc_user")
    parser.add_argument("source_subdir")
    parser.add_argument("dest_subdir")
    parser.add_argument("unsortable_subdir")
    parser.add_argument("--workers", type=int, default=os.cpu_count() or 1)
    args = parser.parse_args()

    # Nextcloud-relative paths (for occ) and absolute filesystem paths
    rel_source_path = f"/{args.nc_user}/files/{args.source_subdir}"
    abs_source_path = f"/var/lib/nextcloud/{args.nc_user}/files/{args.source_subdir}"
    rel_dest_path = f"/{args.nc_user}/files/{args.dest_subdir}"
    abs_dest_path = f"/var/lib/nextcloud/{args.nc_user}/files/{args.dest_subdir}"
    rel_unsortable_path = f"/{args.nc_user}/files/{args.unsortable_subdir}"
    abs_unsortable_path = f"/var/lib/nextcloud/{args.nc_user}/files/{args.unsortable_subdir}"

    source_root = Path(abs_source_path)
    dest_root = Path(abs_dest_path)
    unsortable_root = Path(abs_unsortable_path)

    print("STARTING...")

    # make sure Nextcloud (www-data) can read everything about to be moved
    run(["chown", "-R", "www-data:www-data", str(source_root)], check=True)
    run(["chmod", "-R", "770", str(source_root)], check=True)

    files = list(iter_files(source_root))
    if not files:
        print("NO MATCHING FILES FOUND.")
        print("FINISH.")
        raise SystemExit(0)

    with ThreadPoolExecutor(max_workers=max(1, args.workers)) as executor:
        futures = {
            executor.submit(process_file, file, source_root, dest_root, unsortable_root): file
            for file in files
        }
        for future in as_completed(futures):
            future.result()  # propagate the first worker exception

    scan_nextcloud(rel_source_path, rel_unsortable_path, rel_dest_path)

    print("FINISH.")
|
||||||
71
bundles/nextcloud/README.md
Normal file
71
bundles/nextcloud/README.md
Normal file
|
|
@ -0,0 +1,71 @@
|
||||||
|
Nextcloud
|
||||||
|
=========
|
||||||
|
|
||||||
|
import iphone pictures
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Use Photos app on macOS
|
||||||
|
- select library in the left sidebar
|
||||||
|
- select the pictures
|
||||||
|
- in menu bar open File > Export Unmodified Original for X Photos
|
||||||
|
|
||||||
|
For some files the only reliable source of the creation time is macOS metadata,
which is lost when transferring with rsync, so
we need to embed those timestamps on macOS first:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PHOTOS_PATH="/Users/mwiegand/Desktop/photos"
|
||||||
|
bin/timestamp_icloud_photos_for_nextcloud -d "$PHOTOS_PATH"
|
||||||
|
rsync -avh --progress --rsync-path="sudo rsync" "$PHOTOS_PATH/" ckn@10.0.0.2:/var/lib/nextcloud/ckn/files/SofortUpload/AutoSort/
|
||||||
|
```
|
||||||
|
|
||||||
|
preview generator
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
```
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ preview:generate-all -w "$(nproc)" -n -vvv
|
||||||
|
```
|
||||||
|
|
||||||
|
This index speeds up preview generator dramatically:
|
||||||
|
```sql
|
||||||
|
CREATE INDEX CONCURRENTLY oc_filecache_path_hash_idx
|
||||||
|
ON oc_filecache (path_hash);
|
||||||
|
```
|
||||||
|
|
||||||
|
delete previews:
|
||||||
|
```sh
|
||||||
|
psql nextcloud -x -c "DELETE FROM oc_previews;"
|
||||||
|
rm -rf /var/lib/nextcloud/appdata_oci6dw1woodz/preview/*
|
||||||
|
```
|
||||||
|
|
||||||
|
https://docs.nextcloud.com/server/stable/admin_manual/configuration_files/previews_configuration.html#maximum-preview-size
|
||||||
|
```php
|
||||||
|
'preview_max_x' => 1920,
|
||||||
|
'preview_max_y' => 1920,
|
||||||
|
'preview_max_scale_factor' => 4,
|
||||||
|
```
|
||||||
|
|
||||||
|
https://github.com/nextcloud/previewgenerator?tab=readme-ov-file#i-dont-want-to-generate-all-the-preview-sizes
|
||||||
|
```sh
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="64 256" previewgenerator squareSizes
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator fillWidthHeightSizes # changed
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator widthSizes
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator heightSizes
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set preview jpeg_quality --value="75"
|
||||||
|
sudo -u www-data php /opt/nextcloud/occ config:app:set --value=0 --type=integer previewgenerator job_max_previews # in favour of systemd timer
|
||||||
|
```
|
||||||
|
|
||||||
|
gen previews
|
||||||
|
```sh
|
||||||
|
php /opt/nextcloud/occ preview:generate-all --workers="$(nproc)" --no-interaction -vvv
|
||||||
|
```
|
||||||
|
|
||||||
|
check preview generation
|
||||||
|
```sh
|
||||||
|
find /var/lib/nextcloud/appdata_oci6dw1woodz/preview
|
||||||
|
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/64-64-crop.jpg
|
||||||
|
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/256-256-crop.jpg
|
||||||
|
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/1280-1920-max.jpg
|
||||||
|
|
||||||
|
du -sh /var/lib/nextcloud/appdata_oci6dw1woodz/preview
|
||||||
|
# 28G /var/lib/nextcloud/appdata_oci6dw1woodz/preview
|
||||||
|
```
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
php /opt/nextcloud/occ files:scan --all
|
|
||||||
php /opt/nextcloud/occ files:scan-app-data
|
|
||||||
#php /opt/nextcloud/occ preview:generate-all
|
|
||||||
|
|
@ -146,15 +146,3 @@ actions['nextcloud_add_missing_inidces'] = {
|
||||||
f'action:extract_nextcloud',
|
f'action:extract_nextcloud',
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
# RESCAN
|
|
||||||
|
|
||||||
files['/opt/nextcloud_rescan'] = {
|
|
||||||
'source': 'rescan',
|
|
||||||
'owner': 'www-data',
|
|
||||||
'group': 'www-data',
|
|
||||||
'mode': '550',
|
|
||||||
'needs': [
|
|
||||||
'action:extract_nextcloud',
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import string
|
from shlex import quote
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
defaults = {
|
defaults = {
|
||||||
'apt': {
|
'apt': {
|
||||||
|
|
@ -85,11 +85,35 @@ defaults = {
|
||||||
'user': 'www-data',
|
'user': 'www-data',
|
||||||
'kill_mode': 'process',
|
'kill_mode': 'process',
|
||||||
},
|
},
|
||||||
'nextcloud-rescan': {
|
'nextcloud-scan-app-data': {
|
||||||
'command': '/opt/nextcloud_rescan',
|
'command': '/usr/bin/php /opt/nextcloud/occ files:scan-app-data',
|
||||||
'when': 'Sun 00:00:00',
|
'when': 'yearly',
|
||||||
'user': 'www-data',
|
'user': 'www-data',
|
||||||
},
|
},
|
||||||
|
'nextcloud-scan-files': {
|
||||||
|
'command': '/usr/bin/php /opt/nextcloud/occ files:scan --all',
|
||||||
|
'when': 'weekly',
|
||||||
|
'user': 'www-data',
|
||||||
|
'after': {
|
||||||
|
'nextcloud-scan-app-data.service',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'nextcloud-generate-all-previews': {
|
||||||
|
'command': '/bin/bash -c ' + quote('php /opt/nextcloud/occ preview:generate-all --workers="$(nproc)" --no-interaction -vvv'),
|
||||||
|
'when': 'monthly',
|
||||||
|
'user': 'www-data',
|
||||||
|
'after': {
|
||||||
|
'nextcloud-scan-files.service',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'nextcloud-generate-new-previews': {
|
||||||
|
'command': '/usr/bin/php /opt/nextcloud/occ preview:pre-generate',
|
||||||
|
'when': '*:0/5', # every 5 minutes
|
||||||
|
'user': 'www-data',
|
||||||
|
'after': {
|
||||||
|
'nextcloud-generate-all-previews.service',
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -134,10 +158,18 @@ def config(metadata):
|
||||||
'127.0.0.1',
|
'127.0.0.1',
|
||||||
metadata.get('nextcloud/hostname'),
|
metadata.get('nextcloud/hostname'),
|
||||||
],
|
],
|
||||||
|
'enabledPreviewProviders': [
|
||||||
|
'OC\\Preview\\Image',
|
||||||
|
'OC\\Preview\\Movie',
|
||||||
|
'OC\\Preview\\HEIC',
|
||||||
|
],
|
||||||
|
'preview_max_x': 1920,
|
||||||
|
'preview_max_y': 1920,
|
||||||
|
'preview_max_scale_factor': 4,
|
||||||
'log_type': 'syslog',
|
'log_type': 'syslog',
|
||||||
'syslog_tag': 'nextcloud',
|
'syslog_tag': 'nextcloud',
|
||||||
'logfile': '',
|
'logfile': '',
|
||||||
'loglevel': 3,
|
'loglevel': 2,
|
||||||
'default_phone_region': 'DE',
|
'default_phone_region': 'DE',
|
||||||
'versions_retention_obligation': 'auto, 90',
|
'versions_retention_obligation': 'auto, 90',
|
||||||
'simpleSignUpLink.shown': False,
|
'simpleSignUpLink.shown': False,
|
||||||
|
|
|
||||||
|
|
@ -167,7 +167,7 @@
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
'systemd-swap': 4_000_000_000,
|
'systemd-swap': 24_000_000_000,
|
||||||
'twitch-clip-download': {
|
'twitch-clip-download': {
|
||||||
'channel_name': 'cronekorkn_',
|
'channel_name': 'cronekorkn_',
|
||||||
},
|
},
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue