Compare commits

..

No commits in common. "2899cd50c8c01143f453806813bb6f37e429b4aa" and "afc30a195d42111912831adbfe8288c69848e3e6" have entirely different histories.

7 changed files with 126 additions and 527 deletions

View file

@ -1,216 +0,0 @@
#!/usr/bin/env python3
from subprocess import check_output, CalledProcessError
from datetime import datetime, timedelta
from pathlib import Path
import json
from argparse import ArgumentParser
from concurrent.futures import ThreadPoolExecutor, as_completed
from os import cpu_count
from time import sleep
# File extensions grouped by which metadata family exiftool writes for them.
# NOTE(review): EXT_GROUPS appears unused in this file -- the branch logic in
# add_missing_timestamp hard-codes its own extension sets; confirm before removing.
EXT_GROUPS = {
    "quicktime": {".mp4", ".mov", ".heic", ".cr3"},
    "exif": {".jpg", ".jpeg", ".cr2"},
}
# (exiftool group, tag) pairs to probe for a creation timestamp,
# in order of preference -- the first parsable hit wins.
DATETIME_KEYS = [
    ("Composite", "SubSecDateTimeOriginal"),
    ("Composite", "SubSecCreateDate"),
    ('ExifIFD', 'DateTimeOriginal'),
    ('ExifIFD', 'CreateDate'),
    ('XMP-xmp', 'CreateDate'),
    ('Keys', 'CreationDate'),
    ('QuickTime', 'CreateDate'),
    ('XMP-photoshop', 'DateCreated'),
]
def run(command):
    """Execute *command* and return its stdout with surrounding whitespace stripped.

    Raises CalledProcessError when the command exits non-zero.
    """
    raw_output = check_output(command, text=True)
    return raw_output.strip()
def mdls_timestamp(file):
    """Return the Spotlight creation date (kMDItemContentCreationDate) of *file*.

    Retries up to 5 times with a 1-second pause between attempts, since
    mdls can fail transiently. Raises RuntimeError when all attempts fail.
    """
    for i in range(5):
        if i:
            # BUGFIX: the original sleep(1) sat after two branches that both
            # `return` or `continue`, so it was unreachable and retries fired
            # back-to-back. Sleep before every retry instead.
            sleep(1)
        try:
            output = run(('mdls', '-raw', '-name', 'kMDItemContentCreationDate', file))
        except CalledProcessError as e:
            print(f"{file}: Error running mdls (attempt {i+1}/5): {e}")
            continue
        try:
            return datetime.strptime(output, "%Y-%m-%d %H:%M:%S %z")
        except ValueError as e:
            print(f"{file}: Error parsing mdls output (attempt {i+1}/5): {e}")
            continue
    raise RuntimeError(f"Failed to get mdls timestamp for {file} after 5 attempts")
def exiftool_data(file):
    """Return the first exiftool JSON record of all time tags for *file*, or None on failure."""
    command = (
        'exiftool',
        '-j',  # json
        '-a',  # unknown tags
        '-u',  # unknown values
        '-g1',  # group by category
        '-time:all',  # all time tags
        '-api', 'QuickTimeUTC=1',  # use UTC for QuickTime timestamps
        '-d', '%Y-%m-%dT%H:%M:%S%z',
        file,
    )
    try:
        output = run(command)
    except CalledProcessError as e:
        print(f"Error running exiftool: {e}")
        return None
    return json.loads(output)[0]
def exiftool_timestamp(file):
    """Return (group, tag, datetime) for the first usable timestamp, else (None, None, None)."""
    data = exiftool_data(file)
    for group, tag in DATETIME_KEYS:
        try:
            found = datetime.strptime(data[group][tag], '%Y-%m-%dT%H:%M:%S%z')
        except (TypeError, KeyError, ValueError):
            continue
        return group, tag, found
    print(f"⚠️ {file}: No timestamp found in exiftool: " + json.dumps(data, indent=2))
    return None, None, None
def photo_has_embedded_timestamp(file):
    """Return True when *file*'s embedded (exiftool) timestamp matches Spotlight's.

    "Matches" means within one hour of kMDItemContentCreationDate, to absorb
    small discrepancies between the two sources.
    """
    mdls_ts = mdls_timestamp(file)
    category, key, exiftool_ts = exiftool_timestamp(file)
    if not exiftool_ts:
        print(f"⚠️ {file}: No timestamp found in exiftool")
        return False
    # normalize timezone for comparison
    exiftool_ts = exiftool_ts.astimezone(mdls_ts.tzinfo)
    delta = abs(mdls_ts - exiftool_ts)
    if delta < timedelta(hours=1):  # allow for small differences
        print(f"✅ {file}: {mdls_ts.isoformat()} (#{category}:{key})")
        return True
    else:
        print(f"⚠️ {file}: {mdls_ts.isoformat()} != {exiftool_ts} (Δ={delta})")
        return False
def photos_without_embedded_timestamps(directory):
    """Yield files in *directory* that lack a usable embedded timestamp.

    Files whose embedded timestamp already matches Spotlight's are moved
    into the 'ok' subdirectory. Checks run concurrently; on the first
    failure all pending checks are cancelled and the exception propagates.
    """
    # BUGFIX: cpu_count() can return None, and // 2 on a single-core box
    # would give 0 -- both are rejected by ThreadPoolExecutor. Clamp to >= 1.
    executor = ThreadPoolExecutor(max_workers=max(1, (cpu_count() or 2) // 2))
    try:
        futures = {
            executor.submit(photo_has_embedded_timestamp, file): file
            for file in directory.iterdir()
            if file.is_file()
            if file.suffix.lower() not in {".aae"}  # skip Apple edit sidecars
            if not file.name.startswith('.')  # skip hidden files
        }
        for future in as_completed(futures):
            file = futures[future]
            has_ts = future.result()  # raises immediately on first failed future
            if has_ts:
                file.rename(file.parent / 'ok' / file.name)
            else:
                yield file
    except Exception:
        executor.shutdown(wait=False, cancel_futures=True)
        raise
    else:
        executor.shutdown(wait=True)
def exiftool_write(file, assignments):
    """Write the given (group, tag, value) assignments into *file* via exiftool."""
    print(f"🔵 {file}: Writing -- {assignments}")
    tag_args = [f"-{group}:{tag}={value}" for group, tag, value in assignments]
    command = ["exiftool", "-overwrite_original", "-api", "QuickTimeUTC=1", *tag_args, str(file)]
    return run(command)
def add_missing_timestamp(file):
    """Embed the Spotlight creation timestamp into *file*'s metadata.

    Picks the tag set by container type (HEIC / QuickTime / EXIF / XMP),
    writes via exiftool, then re-verifies; successfully stamped files are
    moved into the 'processed' subdirectory.
    """
    data = exiftool_data(file)
    mdls_ts = mdls_timestamp(file)
    # exiftool expects the UTC offset as "+HH:MM"; strftime yields "+HHMM".
    offset = mdls_ts.strftime("%z")
    offset = f"{offset[:3]}:{offset[3:]}" if len(offset) == 5 else offset
    exif_ts = mdls_ts.strftime("%Y:%m:%d %H:%M:%S")
    qt_ts = mdls_ts.strftime("%Y:%m:%d %H:%M:%S")
    qt_ts_tz = f"{qt_ts}{offset}"
    ext = file.suffix.lower()
    try:
        if ext in {".heic"}:
            # HEIC gets both EXIF-style and QuickTime/XMP-style tags.
            exiftool_write(file, [
                ("ExifIFD", "DateTimeOriginal", qt_ts),
                ("ExifIFD", "CreateDate", qt_ts),
                ("ExifIFD", "OffsetTime", offset),
                ("ExifIFD", "OffsetTimeOriginal", offset),
                ("ExifIFD", "OffsetTimeDigitized", offset),
                ("QuickTime", "CreateDate", qt_ts_tz),
                ("Keys", "CreationDate", qt_ts_tz),
                ("XMP-xmp", "CreateDate", qt_ts_tz),
            ])
        elif "QuickTime" in data or ext in {".mp4", ".mov", ".heic", ".cr3"}:
            exiftool_write(file, [
                ("QuickTime", "CreateDate", qt_ts_tz),
                ("Keys", "CreationDate", qt_ts_tz),
            ])
        elif "ExifIFD" in data or ext in {".jpg", ".jpeg", ".cr2", ".webp"}:
            exiftool_write(file, [
                ("ExifIFD", "DateTimeOriginal", exif_ts),
                ("ExifIFD", "CreateDate", exif_ts),
                ("IFD0", "ModifyDate", exif_ts),
                ("ExifIFD", "OffsetTime", offset),
                ("ExifIFD", "OffsetTimeOriginal", offset),
                ("ExifIFD", "OffsetTimeDigitized", offset),
            ])
        elif ext in {".png", ".gif", ".avif"}:
            # Formats without native EXIF support get XMP timestamps only.
            exiftool_write(file, [
                ("XMP-xmp", "CreateDate", qt_ts_tz),
                ("XMP-photoshop", "DateCreated", exif_ts),
            ])
        else:
            print(f"❌ {file}: unsupported type, skipped")
            return
        # Re-check with the same comparison used during scanning.
        if photo_has_embedded_timestamp(file):
            print(f"✅ {file}: Timestamp successfully added: {mdls_ts.isoformat()}")
            file.rename(file.parent / 'processed' / file.name)
            return
        else:
            category, key, exiftool_ts = exiftool_timestamp(file)
            print(f"❌ {file}: Timestamp still wrong/missing after write '{category}:{key}:{exiftool_ts}': #{json.dumps(data, indent=4)}")
            return
    except CalledProcessError as e:
        print(f"❌ {file}: Failed to write timestamp: {e}")
        return
if __name__ == "__main__":
    parser = ArgumentParser(description="Print timestamps of photos in the current directory.")
    # BUGFIX: -d was optional, so a missing argument crashed with
    # `Path(None)` -> TypeError; require it for a proper usage error.
    parser.add_argument("-d", "--directory", required=True, help="Directory to scan for photos")
    args = parser.parse_args()
    directory = Path(args.directory)
    # Target folders for verified / newly-stamped files.
    (directory/'ok').mkdir(exist_ok=True)
    (directory/'processed').mkdir(exist_ok=True)
    _photos_without_embedded_timestamps = list(photos_without_embedded_timestamps(directory))
    print(f"{len(_photos_without_embedded_timestamps)} photos without embedded timestamps found.")
    print("Press Enter to add missing timestamps...")
    input()
    for file in _photos_without_embedded_timestamps:
        add_missing_timestamp(file)

View file

@ -1,209 +1,110 @@
#!/usr/bin/env python3
import argparse
import base64
import hashlib
import json
import os
import shutil
import subprocess
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from pathlib import Path
#!/bin/bash
# Abort on errors, unset variables, and failures inside pipelines.
set -euo pipefail
# Require exactly the four positional arguments shown in the usage line.
if [[ $# -ne 4 ]]; then
    echo "Usage: $0 <nc_user> <source_subdir> <dest_subdir> <unsortable_subdir>" >&2
    exit 1
fi
ALLOWED_EXTS = {
".png", ".jpg", ".jpeg", ".heic", ".cr2", ".cr3", ".mp4", ".mov",
".webp", ".avif", ".gif",
NC_USER="$1"
SOURCE_SUBDIR="$2"
DEST_SUBDIR="$3"
UNSORTABLE_SUBDIR="$4"
# REL_* paths are what `occ files:scan --path` expects;
# ABS_* are the on-disk locations under the Nextcloud data dir.
REL_SOURCE_PATH="/$NC_USER/files/$SOURCE_SUBDIR"
ABS_SOURCE_PATH="/var/lib/nextcloud/$NC_USER/files/$SOURCE_SUBDIR"
REL_DEST_PATH="/$NC_USER/files/$DEST_SUBDIR"
ABS_DEST_PATH="/var/lib/nextcloud/$NC_USER/files/$DEST_SUBDIR"
REL_UNSORTABLE_PATH="/$NC_USER/files/$UNSORTABLE_SUBDIR"
ABS_UNSORTABLE_PATH="/var/lib/nextcloud/$NC_USER/files/$UNSORTABLE_SUBDIR"
echo "STARTING..."
# Hand the inbox to the web server user before touching anything.
chown -R www-data:www-data "$ABS_SOURCE_PATH"
chmod -R 770 "$ABS_SOURCE_PATH"
# Sort one media file into "$ABS_DEST_PATH/YYYY-MM/" named by timestamp plus
# a short content hash, or move it into the unsortable tree when no usable
# date can be extracted.
process_file() {
    local f="$1"
    local DATETIME DATE TIME YEAR MONTH DAY HOUR MINUTE SECOND HASH EXT RAW FILE RELPATH DIRNAME
    echo "PROCESSING: $f"
    # Preferred source: the embedded CreateDate tag.
    DATETIME="$(
        exiftool -s -s -s -CreateDate "$f" 2>/dev/null | head -n1
    )"
    if [[ -z "$DATETIME" ]]; then
        # Fallback: filesystem modify date, with any timezone suffix cut off.
        DATETIME="$(
            exiftool -s -s -s -FileModifyDate "$f" 2>/dev/null | head -n1 | cut -d'+' -f1 | cut -d'-' -f1
        )"
    fi
    if [[ -z "$DATETIME" ]]; then
        # No date at all: mirror the relative path under the unsortable tree.
        RELPATH="$(realpath --relative-to="$ABS_SOURCE_PATH" "$f")"
        DIRNAME="$(dirname "$ABS_UNSORTABLE_PATH/$RELPATH")"
        echo "UNSORTABLE: $f"
        mkdir -p "$DIRNAME"
        mv -n -- "$f" "$DIRNAME/"
        return 0
    fi
    # Split exiftool's "YYYY:MM:DD HH:MM:SS[+TZ]" output into components.
    DATE="$(cut -d' ' -f1 <<< "$DATETIME")"
    TIME="$(cut -d' ' -f2 <<< "$DATETIME" | cut -d'+' -f1)"
    YEAR="$(cut -d':' -f1 <<< "$DATE")"
    MONTH="$(cut -d':' -f2 <<< "$DATE")"
    DAY="$(cut -d':' -f3 <<< "$DATE")"
    HOUR="$(cut -d':' -f1 <<< "$TIME")"
    MINUTE="$(cut -d':' -f2 <<< "$TIME")"
    SECOND="$(cut -d':' -f3 <<< "$TIME")"
    # 6 chars of URL-safe base64(sha256) to disambiguate identical timestamps.
    HASH="$(sha256sum "$f" | awk '{print $1}' | xxd -r -p | base64 | head -c 6 | tr '/+' '_-')"
    EXT="$(tr '[:upper:]' '[:lower:]' <<< "${f##*.}")"
    # Raw camera files get their own "raw/" subfolder inside the month.
    if [[ "$EXT" == "cr2" || "$EXT" == "cr3" ]]; then
        RAW="raw/"
    else
        RAW=""
    fi
    FILE="$ABS_DEST_PATH/$YEAR-$MONTH/${RAW}${YEAR}${MONTH}${DAY}-${HOUR}${MINUTE}${SECOND}_${HASH}.${EXT}"
    echo "DESTINATION: $FILE"
    mkdir -p "$(dirname "$FILE")"
    mv -- "$f" "$FILE"
}
# (exiftool group, tag) pairs to probe for a creation timestamp,
# in order of preference -- the first parsable hit wins.
DATETIME_KEYS = [
    ("Composite", "SubSecDateTimeOriginal"),
    ("Composite", "SubSecCreateDate"),
    ("ExifIFD", "DateTimeOriginal"),
    ("ExifIFD", "CreateDate"),
    ("XMP-xmp", "CreateDate"),
    ("Keys", "CreationDate"),
    ("QuickTime", "CreateDate"),
    ("XMP-photoshop", "DateCreated"),
]
# Collect all media files, NUL-delimited so arbitrary filenames stay intact.
mapfile -d '' -t FILES < <(
    find "$ABS_SOURCE_PATH" -type f \( \
        -iname '*.PNG' -o \
        -iname '*.JPG' -o \
        -iname '*.JPEG' -o \
        -iname '*.HEIC' -o \
        -iname '*.CR2' -o \
        -iname '*.CR3' -o \
        -iname '*.MP4' -o \
        -iname '*.MOV' \
    \) -print0
)
if ((${#FILES[@]})); then
export -f process_file
export ABS_SOURCE_PATH ABS_DEST_PATH ABS_UNSORTABLE_PATH
def run(command: list[str], check: bool = True) -> subprocess.CompletedProcess:
    """Run *command* capturing text output; raise CalledProcessError when *check* is set."""
    result = subprocess.run(command, capture_output=True, text=True, check=check)
    return result
printf '%s\0' "${FILES[@]}" |
xargs -0 -n1 -P"$(nproc)" bash -c 'process_file "$1"' _
echo "SCANNING..."
chown -R www-data:www-data "$ABS_DEST_PATH"
chown -R www-data:www-data "$ABS_UNSORTABLE_PATH"
chmod -R 770 "$ABS_DEST_PATH"
chmod -R 770 "$ABS_UNSORTABLE_PATH"
sudo -u www-data php /opt/nextcloud/occ files:scan --path "$REL_SOURCE_PATH"
sudo -u www-data php /opt/nextcloud/occ files:scan --path "$REL_UNSORTABLE_PATH"
sudo -u www-data php /opt/nextcloud/occ files:scan --path "$REL_DEST_PATH"
else
echo "NO MATCHING FILES FOUND."
fi
def exiftool_data(file: Path) -> dict | None:
    """Return the first exiftool JSON record of all time tags for *file*.

    Returns None when exiftool exits non-zero, emits no records, or emits
    output that is not valid JSON.
    """
    result = run([
        "exiftool",
        "-j",                      # JSON output
        "-a",                      # allow duplicate tags
        "-u",                      # include unknown tags
        "-g1",                     # group tags by family-1 category
        "-time:all",               # all time-related tags
        "-api", "QuickTimeUTC=1",  # interpret QuickTime timestamps as UTC
        "-d", "%Y-%m-%dT%H:%M:%S%z",
        str(file),
    ], check=False)
    if result.returncode != 0:
        return None
    try:
        # Was `__import__("json").loads(...)` -- an obfuscated import with no
        # benefit; use the module import. The broad `except Exception` is
        # narrowed to the parse error actually expected here.
        data = json.loads(result.stdout)
    except ValueError:  # json.JSONDecodeError is a ValueError subclass
        return None
    return data[0] if data else None
def exiftool_timestamp(file: Path) -> datetime | None:
    """Return the first parsable creation timestamp found in *file*'s metadata, or None."""
    tags = exiftool_data(file)
    if not tags:
        return None
    for group, tag in DATETIME_KEYS:
        try:
            raw_value = tags[group][tag]
        except (KeyError, TypeError):
            continue
        try:
            return datetime.strptime(raw_value, "%Y-%m-%dT%H:%M:%S%z")
        except ValueError:
            continue
    return None
def short_hash(file: Path) -> str:
    """Return a 3-character URL-safe base64 prefix of the file's SHA-256 digest."""
    hasher = hashlib.sha256()
    with file.open("rb") as stream:
        # Hash in 1 MiB chunks so large media files never load fully into memory.
        while chunk := stream.read(1024 * 1024):
            hasher.update(chunk)
    encoded = base64.b64encode(hasher.digest()).decode("ascii")
    # Swap the two non-filename-safe base64 characters.
    return encoded[:3].replace("/", "_").replace("+", "-")
def build_destination(dest_root: Path, file: Path, ts: datetime) -> Path:
    """Compose the sorted target path: <dest>/<YYYY-MM>[/raw]/<YYYYMMDD-HHMMSS_hash>.<ext>."""
    ext = file.suffix.lower().lstrip(".")
    month_dir = dest_root / ts.strftime("%Y-%m")
    if ext in {"cr2", "cr3"}:
        # Raw camera files live in their own subfolder inside the month.
        month_dir = month_dir / "raw"
    stamp = ts.strftime("%Y%m%d-%H%M%S")
    return month_dir / f"{stamp}_{short_hash(file)}.{ext}"
def move_unsortable(file: Path, source_root: Path, unsortable_root: Path) -> None:
    """Move *file* into the unsortable tree, mirroring its path under *source_root*.

    An existing target is never overwritten -- the source file stays put.
    Created directories and moved files are chowned to www-data.
    """
    destination_dir = (unsortable_root / file.relative_to(source_root)).parent
    destination_dir.mkdir(parents=True, exist_ok=True)
    shutil.chown(str(destination_dir), user="www-data", group="www-data")
    destination = destination_dir / file.name
    if destination.exists():
        return
    shutil.move(str(file), str(destination))
    shutil.chown(str(destination), user="www-data", group="www-data")
def move_sorted(file: Path, target: Path) -> None:
    """Move *file* to *target*, creating parents and chowning results to www-data."""
    parent = target.parent
    parent.mkdir(parents=True, exist_ok=True)
    shutil.chown(str(parent), user="www-data", group="www-data")
    shutil.move(str(file), str(target))
    shutil.chown(str(target), user="www-data", group="www-data")
def process_file(file: Path, source_root: Path, dest_root: Path, unsortable_root: Path) -> tuple[Path, str]:
    """Sort a single file by its embedded timestamp.

    Returns (file, "sorted") when moved into the dated destination tree,
    or (file, "unsortable") when no timestamp could be extracted.
    """
    print(f"PROCESSING: {file}")
    ts = exiftool_timestamp(file)
    if ts is None:
        print(f"UNSORTABLE: {file}")
        move_unsortable(file, source_root, unsortable_root)
        return file, "unsortable"
    target = build_destination(dest_root, file, ts)
    print(f"DESTINATION: {target}")
    move_sorted(file, target)
    return file, "sorted"
def scan_nextcloud(rel_source: str, rel_unsortable: str, rel_dest: str) -> None:
    """Rescan the three Nextcloud folders, then kick off preview generation.

    Paths are the occ-relative form ("/<user>/files/<subdir>"). The moves
    done earlier bypass Nextcloud, so `files:scan` is required to sync the
    file cache. Raises CalledProcessError if any occ/systemctl call fails.
    """
    print("SCANNING...")
    # Dead commented-out chown/chmod calls removed; the permission fixes are
    # handled by the callers before processing.
    for rel_path in (rel_source, rel_unsortable, rel_dest):
        run(["sudo", "-u", "www-data", "php", "/opt/nextcloud/occ", "files:scan", "--path", rel_path], check=True)
    run(["systemctl", "start", "nextcloud-generate-new-previews.service"], check=True)
def iter_files(source_root: Path):
    """Yield every file under *source_root* whose extension is in ALLOWED_EXTS."""
    for candidate in source_root.rglob("*"):
        if not candidate.is_file():
            continue
        if candidate.suffix.lower() in ALLOWED_EXTS:
            yield candidate
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Sort Nextcloud media files by embedded timestamp."
    )
    parser.add_argument("nc_user")
    parser.add_argument("source_subdir")
    parser.add_argument("dest_subdir")
    parser.add_argument("unsortable_subdir")
    parser.add_argument("--workers", type=int, default=os.cpu_count() or 1)
    args = parser.parse_args()
    nc_user = args.nc_user
    source_subdir = args.source_subdir
    dest_subdir = args.dest_subdir
    unsortable_subdir = args.unsortable_subdir
    # rel_* paths are what `occ files:scan --path` expects;
    # abs_* are the on-disk locations under the Nextcloud data dir.
    rel_source_path = f"/{nc_user}/files/{source_subdir}"
    abs_source_path = f"/var/lib/nextcloud/{nc_user}/files/{source_subdir}"
    rel_dest_path = f"/{nc_user}/files/{dest_subdir}"
    abs_dest_path = f"/var/lib/nextcloud/{nc_user}/files/{dest_subdir}"
    rel_unsortable_path = f"/{nc_user}/files/{unsortable_subdir}"
    abs_unsortable_path = f"/var/lib/nextcloud/{nc_user}/files/{unsortable_subdir}"
    source_root = Path(abs_source_path)
    dest_root = Path(abs_dest_path)
    unsortable_root = Path(abs_unsortable_path)
    print("STARTING...")
    # Hand the inbox to the web server user before processing.
    run(["chown", "-R", "www-data:www-data", str(source_root)], check=True)
    run(["chmod", "-R", "770", str(source_root)], check=True)
    files = list(iter_files(source_root))
    if not files:
        print("NO MATCHING FILES FOUND.")
        print("FINISH.")
        raise SystemExit(0)
    # Threads (not processes): the work is dominated by exiftool subprocesses
    # and filesystem moves, so the GIL is not a bottleneck here.
    with ThreadPoolExecutor(max_workers=max(1, args.workers)) as executor:
        futures = {
            executor.submit(process_file, file, source_root, dest_root, unsortable_root): file
            for file in files
        }
        for future in as_completed(futures):
            future.result()  # propagate the first worker exception
    scan_nextcloud(rel_source_path, rel_unsortable_path, rel_dest_path)
    print("FINISH.")
echo "FINISH."

View file

@ -1,71 +0,0 @@
Nextcloud
=========
import iphone pictures
----------------------
Use Photos app on macOS
- select library in the left sidebar
- select the pictures
- in menu bar open File > Export Unmodified Original for X Photos
Some files' creation times are lost when transferring with rsync, so
we need to embed those timestamps on macOS first:
```sh
PHOTOS_PATH="/Users/mwiegand/Desktop/photos"
bin/timestamp_icloud_photos_for_nextcloud -d "$PHOTOS_PATH"
rsync -avh --progress --rsync-path="sudo rsync" "$PHOTOS_PATH/" ckn@10.0.0.2:/var/lib/nextcloud/ckn/files/SofortUpload/AutoSort/
```
preview generator
-----------------
```
sudo -u www-data php /opt/nextcloud/occ preview:generate-all -w "$(nproc)" -n -vvv
```
This index speeds up preview generator dramatically:
```sh
CREATE INDEX CONCURRENTLY oc_filecache_path_hash_idx
ON oc_filecache (path_hash);
```
delete previews:
```sh
psql nextcloud -x -c "DELETE FROM oc_previews;"
rm -rf /var/lib/nextcloud/appdata_oci6dw1woodz/preview/*
```
https://docs.nextcloud.com/server/stable/admin_manual/configuration_files/previews_configuration.html#maximum-preview-size
```php
'preview_max_x' => 1920,
'preview_max_y' => 1920,
'preview_max_scale_factor' => 4,
```
https://github.com/nextcloud/previewgenerator?tab=readme-ov-file#i-dont-want-to-generate-all-the-preview-sizes
```sh
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="64 256" previewgenerator squareSizes
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator fillWidthHeightSizes # changed
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator widthSizes
sudo -u www-data php /opt/nextcloud/occ config:app:set --value="" previewgenerator heightSizes
sudo -u www-data php /opt/nextcloud/occ config:app:set preview jpeg_quality --value="75"
sudo -u www-data php /opt/nextcloud/occ config:app:set --value=0 --type=integer previewgenerator job_max_previews # in favour of systemd timer
```
gen previews
```sh
php /opt/nextcloud/occ preview:generate-all --workers="$(nproc)" --no-interaction -vvv
```
check preview generation
```sh
find /var/lib/nextcloud/appdata_oci6dw1woodz/preview
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/64-64-crop.jpg
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/256-256-crop.jpg
# /var/lib/nextcloud/appdata_oci6dw1woodz/preview/6/9/1/f/7/b/4/2822419/1280-1920-max.jpg
du -sh /var/lib/nextcloud/appdata_oci6dw1woodz/preview
# 28G /var/lib/nextcloud/appdata_oci6dw1woodz/preview
```

View file

@ -0,0 +1,5 @@
#!/bin/bash
# Rebuild the Nextcloud file cache for all users and for app data.
php /opt/nextcloud/occ files:scan --all
php /opt/nextcloud/occ files:scan-app-data
#php /opt/nextcloud/occ preview:generate-all

View file

@ -146,3 +146,15 @@ actions['nextcloud_add_missing_inidces'] = {
f'action:extract_nextcloud',
],
}
# RESCAN
# Deploy the rescan helper script, readable and executable by www-data only.
files['/opt/nextcloud_rescan'] = {
    'source': 'rescan',
    'owner': 'www-data',
    'group': 'www-data',
    'mode': '550',  # r-x for owner and group, nothing for others
    'needs': [
        'action:extract_nextcloud',
    ],
}

View file

@ -1,5 +1,5 @@
from shlex import quote
import string
from uuid import UUID
defaults = {
'apt': {
@ -85,35 +85,11 @@ defaults = {
'user': 'www-data',
'kill_mode': 'process',
},
'nextcloud-scan-app-data': {
'command': '/usr/bin/php /opt/nextcloud/occ files:scan-app-data',
'when': 'yearly',
'nextcloud-rescan': {
'command': '/opt/nextcloud_rescan',
'when': 'Sun 00:00:00',
'user': 'www-data',
},
'nextcloud-scan-files': {
'command': '/usr/bin/php /opt/nextcloud/occ files:scan --all',
'when': 'weekly',
'user': 'www-data',
'after': {
'nextcloud-scan-app-data.service',
},
},
'nextcloud-generate-all-previews': {
'command': '/bin/bash -c ' + quote('php /opt/nextcloud/occ preview:generate-all --workers="$(nproc)" --no-interaction -vvv'),
'when': 'monthly',
'user': 'www-data',
'after': {
'nextcloud-scan-files.service',
},
},
'nextcloud-generate-new-previews': {
'command': '/usr/bin/php /opt/nextcloud/occ preview:pre-generate',
'when': '*:0/5', # every 5 minutes
'user': 'www-data',
'after': {
'nextcloud-generate-all-previews.service',
},
},
},
}
@ -158,18 +134,10 @@ def config(metadata):
'127.0.0.1',
metadata.get('nextcloud/hostname'),
],
'enabledPreviewProviders': [
'OC\\Preview\\Image',
'OC\\Preview\\Movie',
'OC\\Preview\\HEIC',
],
'preview_max_x': 1920,
'preview_max_y': 1920,
'preview_max_scale_factor': 4,
'log_type': 'syslog',
'syslog_tag': 'nextcloud',
'logfile': '',
'loglevel': 2,
'loglevel': 3,
'default_phone_region': 'DE',
'versions_retention_obligation': 'auto, 90',
'simpleSignUpLink.shown': False,

View file

@ -167,7 +167,7 @@
},
},
},
'systemd-swap': 24_000_000_000,
'systemd-swap': 4_000_000_000,
'twitch-clip-download': {
'channel_name': 'cronekorkn_',
},