Compare commits
6 Commits
main ... multiproce

| Author | SHA1 | Date |
|---|---|---|
|  | 5bdd52df25 |  |
|  | 3cde0b79c9 |  |
|  | e2bd262f75 |  |
|  | db26532bab |  |
|  | 7a73b9168d |  |
|  | 79dcfb38a8 |  |
@@ -1180,6 +1180,9 @@ Options:
  --save-config <config file path>
                                  Save options to file for use with --load-
                                  config. File format is TOML.
  -M, --multiprocess NUMBER_OF_PROCESSES
                                  Run export in parallel using
                                  NUMBER_OF_PROCESSES processes. [x>=1]
  --help                          Show this message and exit.

** Export **

@@ -214,7 +214,8 @@ SEARCH_CATEGORY_PHOTO_NAME = 2056


# Max filename length on MacOS
MAX_FILENAME_LEN = 255
# subtract 6 chars for the lock file extension in form: ".filename.lock"
MAX_FILENAME_LEN = 255 - 6

# Max directory name length on MacOS
MAX_DIRNAME_LEN = 255

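A quick arithmetic check of the 6 characters reserved above (an illustration added here, not part of the diff): the lock file wraps the exported name in a leading "." and a trailing ".lock", which is exactly 6 extra characters.

filename = "IMG_1234.jpeg"                 # any exported filename
lock_name = f".{filename}.lock"            # lock file format described in the comment above
assert len(lock_name) - len(filename) == 6  # "." + ".lock" -> reserve 6 chars of the 255 limit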
osxphotos/cli.py (779 changes)
@@ -8,6 +8,7 @@ import dataclasses
import datetime
import io
import json
import multiprocessing as mp
import os
import os.path
import pathlib
@@ -27,8 +28,10 @@ import osxmetadata
import photoscript
import rich.traceback
import yaml
from more_itertools import divide
from rich import pretty, print
from rich.console import Console
from rich.progress import Progress
from rich.syntax import Syntax

import osxphotos
@@ -148,7 +151,7 @@ def verbose_(*args, **kwargs):
    """print output if verbose flag set"""
    if VERBOSE:
        styled_args = []
        timestamp = str(datetime.datetime.now()) + " -- " if VERBOSE_TIMESTAMP else ""
        timestamp = f"[{datetime.datetime.now()}] -- " if VERBOSE_TIMESTAMP else ""
        for arg in args:
            if type(arg) == str:
                arg = timestamp + arg
@@ -1169,6 +1172,13 @@ def cli(ctx, db, json_, debug):
    help=("Save options to file for use with --load-config. File format is TOML."),
    type=click.Path(),
)
@click.option(
    "--multiprocess",
    "-M",
    metavar="NUMBER_OF_PROCESSES",
    help="Run export in parallel using NUMBER_OF_PROCESSES processes. ",
    type=click.IntRange(min=1),
)
@click.option(
    "--beta",
    is_flag=True,
@@ -1338,6 +1348,7 @@ def export(
    preview_if_missing,
    profile,
    profile_sort,
    multiprocess,
):
    """Export photos from the Photos database.
    Export path DEST is required.
@ -1868,198 +1879,28 @@ def export(
|
||||
# store results of export
|
||||
results = ExportResults()
|
||||
|
||||
if photos:
|
||||
if not photos:
|
||||
click.echo("Did not find any photos to export")
|
||||
else:
|
||||
num_photos = len(photos)
|
||||
# TODO: photos or photo appears several times, pull into a separate function
|
||||
photo_str = "photos" if num_photos > 1 else "photo"
|
||||
click.echo(f"Exporting {num_photos} {photo_str} to {dest}...")
|
||||
start_time = time.perf_counter()
|
||||
# though the command line option is current_name, internally all processing
|
||||
# logic uses original_name which is the boolean inverse of current_name
|
||||
# because the original code used --original-name as an option
|
||||
original_name = not current_name
|
||||
|
||||
# set up for --add-export-to-album if needed
|
||||
album_export = (
|
||||
PhotosAlbum(add_exported_to_album, verbose=verbose_)
|
||||
if add_exported_to_album
|
||||
else None
|
||||
)
|
||||
album_skipped = (
|
||||
PhotosAlbum(add_skipped_to_album, verbose=verbose_)
|
||||
if add_skipped_to_album
|
||||
else None
|
||||
)
|
||||
album_missing = (
|
||||
PhotosAlbum(add_missing_to_album, verbose=verbose_)
|
||||
if add_missing_to_album
|
||||
else None
|
||||
)
|
||||
|
||||
photo_num = 0
|
||||
# send progress bar output to /dev/null if verbose to hide the progress bar
|
||||
fp = open(os.devnull, "w") if verbose else None
|
||||
with click.progressbar(photos, show_pos=True, file=fp) as bar:
|
||||
for p in bar:
|
||||
photo_num += 1
|
||||
export_results = export_photo(
|
||||
photo=p,
|
||||
dest=dest,
|
||||
verbose=verbose,
|
||||
export_by_date=export_by_date,
|
||||
sidecar=sidecar,
|
||||
sidecar_drop_ext=sidecar_drop_ext,
|
||||
update=update,
|
||||
ignore_signature=ignore_signature,
|
||||
export_as_hardlink=export_as_hardlink,
|
||||
overwrite=overwrite,
|
||||
export_edited=export_edited,
|
||||
skip_original_if_edited=skip_original_if_edited,
|
||||
original_name=original_name,
|
||||
export_live=export_live,
|
||||
download_missing=download_missing,
|
||||
exiftool=exiftool,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
exiftool_merge_persons=exiftool_merge_persons,
|
||||
directory=directory,
|
||||
filename_template=filename_template,
|
||||
export_raw=export_raw,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
keyword_template=keyword_template,
|
||||
description_template=description_template,
|
||||
export_db=export_db,
|
||||
fileutil=fileutil,
|
||||
dry_run=dry_run,
|
||||
touch_file=touch_file,
|
||||
edited_suffix=edited_suffix,
|
||||
original_suffix=original_suffix,
|
||||
use_photos_export=use_photos_export,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
jpeg_quality=jpeg_quality,
|
||||
ignore_date_modified=ignore_date_modified,
|
||||
use_photokit=use_photokit,
|
||||
exiftool_option=exiftool_option,
|
||||
strip=strip,
|
||||
jpeg_ext=jpeg_ext,
|
||||
replace_keywords=replace_keywords,
|
||||
retry=retry,
|
||||
export_dir=dest,
|
||||
export_preview=preview,
|
||||
preview_suffix=preview_suffix,
|
||||
preview_if_missing=preview_if_missing,
|
||||
photo_num=photo_num,
|
||||
num_photos=num_photos,
|
||||
)
|
||||
|
||||
if post_function:
|
||||
for function in post_function:
|
||||
# post function is tuple of (function, filename.py::function_name)
|
||||
verbose_(f"Calling post-function {function[1]}")
|
||||
if not dry_run:
|
||||
try:
|
||||
function[0](p, export_results, verbose_)
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error running post-function {function[1]}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
run_post_command(
|
||||
photo=p,
|
||||
post_command=post_command,
|
||||
export_results=export_results,
|
||||
export_dir=dest,
|
||||
dry_run=dry_run,
|
||||
exiftool_path=exiftool_path,
|
||||
export_db=export_db,
|
||||
)
|
||||
|
||||
if album_export and export_results.exported:
|
||||
try:
|
||||
album_export.add(p)
|
||||
export_results.exported_album = [
|
||||
(filename, album_export.name)
|
||||
for filename in export_results.exported
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_export.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_skipped and export_results.skipped:
|
||||
try:
|
||||
album_skipped.add(p)
|
||||
export_results.skipped_album = [
|
||||
(filename, album_skipped.name)
|
||||
for filename in export_results.skipped
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_skipped.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_missing and export_results.missing:
|
||||
try:
|
||||
album_missing.add(p)
|
||||
export_results.missing_album = [
|
||||
(filename, album_missing.name)
|
||||
for filename in export_results.missing
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_missing.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
results += export_results
|
||||
|
||||
# all photo files (not including sidecars) that are part of this export set
|
||||
# used below for applying Finder tags, etc.
|
||||
photo_files = set(
|
||||
export_results.exported
|
||||
+ export_results.new
|
||||
+ export_results.updated
|
||||
+ export_results.exif_updated
|
||||
+ export_results.converted_to_jpeg
|
||||
+ export_results.skipped
|
||||
)
|
||||
|
||||
if finder_tag_keywords or finder_tag_template:
|
||||
tags_written, tags_skipped = write_finder_tags(
|
||||
p,
|
||||
photo_files,
|
||||
keywords=finder_tag_keywords,
|
||||
keyword_template=keyword_template,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
finder_tag_template=finder_tag_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(tags_written)
|
||||
results.xattr_skipped.extend(tags_skipped)
|
||||
|
||||
if xattr_template:
|
||||
xattr_written, xattr_skipped = write_extended_attributes(
|
||||
p,
|
||||
photo_files,
|
||||
xattr_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(xattr_written)
|
||||
results.xattr_skipped.extend(xattr_skipped)
|
||||
|
||||
if fp is not None:
|
||||
fp.close()
|
||||
if multiprocess:
|
||||
results = _export_photos_with_multiprocessing(
|
||||
photos, kwargs={**locals(), **globals()}
|
||||
)
|
||||
else:
|
||||
# some hackery to get the arguments for export_photos
|
||||
export_args = export_photos.__code__.co_varnames
|
||||
results = export_photos(
|
||||
**{
|
||||
k: v
|
||||
for k, v in {**locals(), **globals()}.items()
|
||||
if k in export_args
|
||||
}
|
||||
)
|
||||
|
||||
photo_str_total = "photos" if len(photos) != 1 else "photo"
|
||||
if update:
|
||||
@ -2082,8 +1923,6 @@ def export(
|
||||
click.echo(summary)
|
||||
stop_time = time.perf_counter()
|
||||
click.echo(f"Elapsed time: {format_sec_to_hhmmss(stop_time-start_time)}")
|
||||
else:
|
||||
click.echo("Did not find any photos to export")
|
||||
|
||||
# cleanup files and do report if needed
|
||||
if cleanup:
|
||||
@@ -2124,16 +1963,102 @@ def export(
    export_db.close()


def _export_with_profiler(args: Dict):
    """ "Run export with cProfile"""
def _export_photos_with_multiprocessing(photos: List, kwargs: Dict) -> ExportResults():
    """Run export using multiple processes"""
    try:
        args.pop("profile")
        num_procs = kwargs.get("multiprocess")
    except KeyError:
        pass
        raise ValueError("_export_runner called without multiprocess param")

    cProfile.runctx(
        "_export(**args)", globals=globals(), locals=locals(), sort="tottime"
    )
    # build kwargs for export_photos
    # keep only the params export_photos expects
    export_args = export_photos.__code__.co_varnames
    kwargs = {arg: value for arg, value in kwargs.items() if arg in export_args}
    for arg in ["photosdb", "photos"]:
        kwargs.pop(arg, None)
    kwargs["photos"] = None

    # can't pickle an open sqlite connection so ensure export_db is closed
    export_db = kwargs.get("export_db")
    export_db.close()

    # verbose output?
    verbose = kwargs.get("verbose", None)

    # get list of uuids to pass to export_photos
    uuids = [p.uuid for p in photos]
    uuid_chunks = [list(chunk) for chunk in divide(num_procs, uuids)]

    # create a queue to communicate with processes
    q = mp.Queue()
    processes = []
    if len(uuid_chunks) < num_procs:
        num_procs = len(uuid_chunks)
    for i in range(num_procs):
        kwargs = kwargs.copy()
        kwargs["_mp_queue"] = q
        kwargs["_mp_process_total"] = num_procs
        kwargs["_mp_process_num"] = i
        kwargs["_mp_uuids"] = uuid_chunks[i]
        if not kwargs["_mp_uuids"]:
            click.echo(f"Out of UUIDs to process, skipping process {i}")
            continue
        click.echo(f"Starting process number #{i}")
        p = mp.Process(target=export_photos, kwargs=kwargs)
        p.start()
        processes.append(p)

    class FakeProgress:
        def __init__(self):
            self.finished = False
            self.console = Console()

        def add_task(self, task, total):
            pass

        def update(self, task_id, completed):
            pass

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, exc_traceback):
            pass

    progress_class = Progress if not verbose else FakeProgress
    export_results = ExportResults()
    with progress_class() as progress:
        tasks = []
        for i, p in enumerate(processes):
            tasks.append(
                progress.add_task(
                    f"Process {i} ({len(uuid_chunks[i])} photos)...",
                    total=len(uuid_chunks[i]),
                )
            )

        while not progress.finished:
        while True:
            if not any(mp.active_children()):
                break
            try:
                results = q.get(timeout=0.5)
                # print(results)
                if results[1] == "VERBOSE":
                    progress.console.print(f"{results[0]}: {results[2]}")
                    # verbose_(f"{results[0]}: {results[2]}")
                elif results[1] == "DONE":
                    # click.echo(f"Process {results[0]} is done")
                    export_results += ExportResults(**results[2])
                    if isinstance(progress, FakeProgress):
                        progress.finished = True
                elif results[1] == "PROGRESS":
                    progress.update(tasks[results[0]], completed=results[2])
            except Exception:
                pass

    click.echo("All processes finished")
    return export_results


@cli.command()
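The parent/worker protocol above is easy to miss in a flattened diff: each worker puts [process_num, tag, payload] messages on a shared multiprocessing.Queue, where tag is "START", "VERBOSE", "PROGRESS", or "DONE", and the parent drains the queue until the children exit, reassembling results from the "DONE" payload (which is ExportResults.asdict()). A minimal, self-contained sketch of the same pattern follows; worker() is a hypothetical stand-in for export_photos(), not the osxphotos API.

import multiprocessing as mp

from more_itertools import divide  # same chunking helper the diff imports


def worker(num, uuids, queue):
    # stand-in for export_photos(): report progress, then a final DONE payload
    for done, _uuid in enumerate(uuids, start=1):
        queue.put([num, "PROGRESS", done])
    queue.put([num, "DONE", {"exported": uuids}])  # payload mirrors ExportResults.asdict()


if __name__ == "__main__":
    uuids = [f"uuid-{i}" for i in range(10)]
    num_procs = 3
    chunks = [list(chunk) for chunk in divide(num_procs, uuids)]  # near-equal slices
    queue = mp.Queue()
    procs = [mp.Process(target=worker, args=(i, chunks[i], queue)) for i in range(num_procs)]
    for p in procs:
        p.start()

    exported, done_count = [], 0
    while done_count < num_procs:
        num, tag, payload = queue.get()  # blocks until a worker reports
        if tag == "PROGRESS":
            pass  # a real caller would update a rich.progress task here
        elif tag == "DONE":
            done_count += 1
            exported.extend(payload["exported"])
    for p in procs:
        p.join()
    print(f"exported {len(exported)} items across {num_procs} processes")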
@ -2582,100 +2507,387 @@ def print_photo_info(photos, json=False):
|
||||
csv_writer.writerow(row)
|
||||
|
||||
|
||||
def export_photos(
|
||||
add_exported_to_album,
|
||||
add_missing_to_album,
|
||||
add_skipped_to_album,
|
||||
album_keyword,
|
||||
convert_to_jpeg,
|
||||
current_name,
|
||||
db,
|
||||
description_template,
|
||||
dest,
|
||||
directory,
|
||||
download_missing,
|
||||
dry_run,
|
||||
edited_suffix,
|
||||
exiftool_merge_keywords,
|
||||
exiftool_merge_persons,
|
||||
exiftool_option,
|
||||
exiftool_path,
|
||||
exiftool,
|
||||
export_as_hardlink,
|
||||
export_by_date,
|
||||
export_db,
|
||||
export_edited,
|
||||
export_live,
|
||||
export_raw,
|
||||
filename_template,
|
||||
fileutil,
|
||||
finder_tag_keywords,
|
||||
finder_tag_template,
|
||||
ignore_date_modified,
|
||||
ignore_signature,
|
||||
jpeg_ext,
|
||||
jpeg_quality,
|
||||
keyword_template,
|
||||
multiprocess,
|
||||
original_suffix,
|
||||
overwrite,
|
||||
person_keyword,
|
||||
photos,
|
||||
post_command,
|
||||
post_function,
|
||||
preview_if_missing,
|
||||
preview_suffix,
|
||||
preview,
|
||||
replace_keywords,
|
||||
retry,
|
||||
sidecar_drop_ext,
|
||||
sidecar,
|
||||
skip_original_if_edited,
|
||||
strip,
|
||||
touch_file,
|
||||
update,
|
||||
use_photokit,
|
||||
use_photos_export,
|
||||
verbose,
|
||||
verbose_,
|
||||
xattr_template,
|
||||
_mp_uuids=None,
|
||||
_mp_process_total=None,
|
||||
_mp_process_num=None,
|
||||
_mp_queue=None,
|
||||
**kwargs,
|
||||
):
|
||||
"""export photos"""
|
||||
|
||||
# Need to pass the verbose_ method if for multiprocessing to work
|
||||
_mp_verbose = None
|
||||
if multiprocess:
|
||||
_mp_queue.put(
|
||||
[
|
||||
_mp_process_num,
|
||||
"START",
|
||||
f"multiprocess mode: {_mp_process_num}, {_mp_process_total}",
|
||||
]
|
||||
)
|
||||
|
||||
def _mp_verbose(*args, **kwargs):
|
||||
_mp_queue.put([_mp_process_num, "VERBOSE", args])
|
||||
|
||||
verbose_ = _mp_verbose
|
||||
photosdb = osxphotos.PhotosDB(db, verbose=verbose_)
|
||||
verbose_(f"_mp_uuids: {len(_mp_uuids)}")
|
||||
photos = photosdb.photos_by_uuid(_mp_uuids)
|
||||
verbose_(f"photos: {len(photos)}")
|
||||
|
||||
results = ExportResults()
|
||||
num_photos = len(photos)
|
||||
# though the command line option is current_name, internally all processing
|
||||
# logic uses original_name which is the boolean inverse of current_name
|
||||
# because the original code used --original-name as an option
|
||||
original_name = not current_name
|
||||
|
||||
# set up for --add-export-to-album if needed
|
||||
album_export = (
|
||||
PhotosAlbum(add_exported_to_album, verbose=verbose_)
|
||||
if add_exported_to_album
|
||||
else None
|
||||
)
|
||||
album_skipped = (
|
||||
PhotosAlbum(add_skipped_to_album, verbose=verbose_)
|
||||
if add_skipped_to_album
|
||||
else None
|
||||
)
|
||||
album_missing = (
|
||||
PhotosAlbum(add_missing_to_album, verbose=verbose_)
|
||||
if add_missing_to_album
|
||||
else None
|
||||
)
|
||||
|
||||
photo_num = 0
|
||||
# send progress bar output to /dev/null if verbose or multiprocess to hide the progress bar
|
||||
fp = open(os.devnull, "w") if verbose or multiprocess else None
|
||||
with click.progressbar(photos, show_pos=True, file=fp) as bar:
|
||||
for p in bar:
|
||||
photo_num += 1
|
||||
if multiprocess:
|
||||
_mp_queue.put([_mp_process_num, "PROGRESS", photo_num, num_photos])
|
||||
export_results = export_photo(
|
||||
photo=p,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
directory=directory,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited_suffix=edited_suffix,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
exiftool_merge_persons=exiftool_merge_persons,
|
||||
exiftool_option=exiftool_option,
|
||||
exiftool=exiftool,
|
||||
export_as_hardlink=export_as_hardlink,
|
||||
export_by_date=export_by_date,
|
||||
export_db=export_db,
|
||||
export_dir=dest,
|
||||
export_edited=export_edited,
|
||||
export_live=export_live,
|
||||
export_preview=preview,
|
||||
export_raw=export_raw,
|
||||
filename_template=filename_template,
|
||||
fileutil=fileutil,
|
||||
ignore_date_modified=ignore_date_modified,
|
||||
ignore_signature=ignore_signature,
|
||||
jpeg_ext=jpeg_ext,
|
||||
jpeg_quality=jpeg_quality,
|
||||
keyword_template=keyword_template,
|
||||
num_photos=num_photos,
|
||||
original_name=original_name,
|
||||
original_suffix=original_suffix,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo_num=photo_num,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
retry=retry,
|
||||
sidecar_drop_ext=sidecar_drop_ext,
|
||||
sidecar=sidecar,
|
||||
skip_original_if_edited=skip_original_if_edited,
|
||||
strip=strip,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
if post_function:
|
||||
for function in post_function:
|
||||
# post function is tuple of (function, filename.py::function_name)
|
||||
verbose_(f"Calling post-function {function[1]}")
|
||||
if not dry_run:
|
||||
try:
|
||||
function[0](p, export_results, verbose_)
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error running post-function {function[1]}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
run_post_command(
|
||||
photo=p,
|
||||
post_command=post_command,
|
||||
export_results=export_results,
|
||||
export_dir=dest,
|
||||
dry_run=dry_run,
|
||||
exiftool_path=exiftool_path,
|
||||
export_db=export_db,
|
||||
)
|
||||
|
||||
if album_export and export_results.exported:
|
||||
try:
|
||||
album_export.add(p)
|
||||
export_results.exported_album = [
|
||||
(filename, album_export.name)
|
||||
for filename in export_results.exported
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_export.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_skipped and export_results.skipped:
|
||||
try:
|
||||
album_skipped.add(p)
|
||||
export_results.skipped_album = [
|
||||
(filename, album_skipped.name)
|
||||
for filename in export_results.skipped
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_skipped.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_missing and export_results.missing:
|
||||
try:
|
||||
album_missing.add(p)
|
||||
export_results.missing_album = [
|
||||
(filename, album_missing.name)
|
||||
for filename in export_results.missing
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_missing.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
results += export_results
|
||||
|
||||
# all photo files (not including sidecars) that are part of this export set
|
||||
# used below for applying Finder tags, etc.
|
||||
photo_files = set(
|
||||
export_results.exported
|
||||
+ export_results.new
|
||||
+ export_results.updated
|
||||
+ export_results.exif_updated
|
||||
+ export_results.converted_to_jpeg
|
||||
+ export_results.skipped
|
||||
)
|
||||
|
||||
if finder_tag_keywords or finder_tag_template:
|
||||
tags_written, tags_skipped = write_finder_tags(
|
||||
p,
|
||||
photo_files,
|
||||
keywords=finder_tag_keywords,
|
||||
keyword_template=keyword_template,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
finder_tag_template=finder_tag_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(tags_written)
|
||||
results.xattr_skipped.extend(tags_skipped)
|
||||
|
||||
if xattr_template:
|
||||
xattr_written, xattr_skipped = write_extended_attributes(
|
||||
p,
|
||||
photo_files,
|
||||
xattr_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(xattr_written)
|
||||
results.xattr_skipped.extend(xattr_skipped)
|
||||
|
||||
if fp is not None:
|
||||
fp.close()
|
||||
|
||||
if multiprocess:
|
||||
_mp_queue.put([_mp_process_num, "DONE", results.asdict()])
|
||||
else:
|
||||
return results
|
||||
|
||||
|
||||
def export_photo(
|
||||
photo=None,
|
||||
dest=None,
|
||||
verbose=None,
|
||||
export_by_date=None,
|
||||
sidecar=None,
|
||||
sidecar_drop_ext=False,
|
||||
update=None,
|
||||
ignore_signature=None,
|
||||
export_as_hardlink=None,
|
||||
overwrite=None,
|
||||
export_edited=None,
|
||||
skip_original_if_edited=None,
|
||||
original_name=None,
|
||||
export_live=None,
|
||||
album_keyword=None,
|
||||
convert_to_jpeg=False,
|
||||
description_template=None,
|
||||
directory=None,
|
||||
download_missing=None,
|
||||
exiftool=None,
|
||||
dry_run=None,
|
||||
edited_suffix="_edited",
|
||||
exiftool_merge_keywords=False,
|
||||
exiftool_merge_persons=False,
|
||||
directory=None,
|
||||
filename_template=None,
|
||||
export_raw=None,
|
||||
album_keyword=None,
|
||||
person_keyword=None,
|
||||
keyword_template=None,
|
||||
description_template=None,
|
||||
export_db=None,
|
||||
fileutil=FileUtil,
|
||||
dry_run=None,
|
||||
touch_file=None,
|
||||
edited_suffix="_edited",
|
||||
original_suffix="",
|
||||
use_photos_export=False,
|
||||
convert_to_jpeg=False,
|
||||
jpeg_quality=1.0,
|
||||
ignore_date_modified=False,
|
||||
use_photokit=False,
|
||||
exiftool_option=None,
|
||||
strip=False,
|
||||
exiftool=None,
|
||||
export_as_hardlink=None,
|
||||
export_by_date=None,
|
||||
export_db=None,
|
||||
export_dir=None,
|
||||
export_edited=None,
|
||||
export_live=None,
|
||||
export_preview=False,
|
||||
export_raw=None,
|
||||
filename_template=None,
|
||||
fileutil=FileUtil,
|
||||
ignore_date_modified=False,
|
||||
ignore_signature=None,
|
||||
jpeg_ext=None,
|
||||
jpeg_quality=1.0,
|
||||
keyword_template=None,
|
||||
num_photos=1,
|
||||
original_name=None,
|
||||
original_suffix="",
|
||||
overwrite=None,
|
||||
person_keyword=None,
|
||||
photo_num=1,
|
||||
preview_if_missing=False,
|
||||
preview_suffix=None,
|
||||
replace_keywords=False,
|
||||
retry=0,
|
||||
export_dir=None,
|
||||
export_preview=False,
|
||||
preview_suffix=None,
|
||||
preview_if_missing=False,
|
||||
photo_num=1,
|
||||
num_photos=1,
|
||||
sidecar_drop_ext=False,
|
||||
sidecar=None,
|
||||
skip_original_if_edited=None,
|
||||
strip=False,
|
||||
touch_file=None,
|
||||
update=None,
|
||||
use_photokit=False,
|
||||
use_photos_export=False,
|
||||
verbose_=None,
|
||||
verbose=None,
|
||||
_mp_verbose=None,
|
||||
):
|
||||
"""Helper function for export that does the actual export
|
||||
|
||||
Args:
|
||||
photo: PhotoInfo object
|
||||
dest: destination path as string
|
||||
verbose: boolean; print verbose output
|
||||
export_by_date: boolean; create export folder in form dest/YYYY/MM/DD
|
||||
sidecar: list zero, 1 or 2 of ["json","xmp"] of sidecar variety to export
|
||||
sidecar_drop_ext: boolean; if True, drops photo extension from sidecar name
|
||||
export_as_hardlink: boolean; hardlink files instead of copying them
|
||||
overwrite: boolean; overwrite dest file if it already exists
|
||||
original_name: boolean; use original filename instead of current filename
|
||||
export_live: boolean; also export live video component if photo is a live photo
|
||||
live video will have same name as photo but with .mov extension
|
||||
download_missing: attempt download of missing iCloud photos
|
||||
exiftool: use exiftool to write EXIF metadata directly to exported photo
|
||||
directory: template used to determine output directory
|
||||
filename_template: template use to determine output file
|
||||
export_raw: boolean; if True exports raw image associate with the photo
|
||||
export_edited: boolean; if True exports edited version of photo if there is one
|
||||
skip_original_if_edited: boolean; if True does not export original if photo has been edited
|
||||
album_keyword: boolean; if True, exports album names as keywords in metadata
|
||||
person_keyword: boolean; if True, exports person names as keywords in metadata
|
||||
keyword_template: list of strings; if provided use rendered template strings as keywords
|
||||
description_template: string; optional template string that will be rendered for use as photo description
|
||||
export_db: export database instance compatible with ExportDB_ABC
|
||||
fileutil: file util class compatible with FileUtilABC
|
||||
dry_run: boolean; if True, doesn't actually export or update any files
|
||||
touch_file: boolean; sets file's modification time to match photo date
|
||||
use_photos_export: boolean; if True forces the use of AppleScript to export even if photo not missing
|
||||
convert_to_jpeg: boolean; if True, converts non-jpeg images to jpeg
|
||||
jpeg_quality: float in range 0.0 <= jpeg_quality <= 1.0. A value of 1.0 specifies use best quality, a value of 0.0 specifies use maximum compression.
|
||||
ignore_date_modified: if True, sets EXIF:ModifyDate to EXIF:DateTimeOriginal even if date_modified is set
|
||||
exiftool_option: optional list flags (e.g. ["-m", "-F"]) to pass to exiftool
|
||||
description_template: string; optional template string that will be rendered for use as photo description
|
||||
directory: template used to determine output directory
|
||||
download_missing: attempt download of missing iCloud photos
|
||||
dry_run: boolean; if True, doesn't actually export or update any files
|
||||
exiftool_merge_keywords: boolean; if True, merged keywords found in file's exif data (requires exiftool)
|
||||
exiftool_merge_persons: boolean; if True, merged persons found in file's exif data (requires exiftool)
|
||||
exiftool_option: optional list flags (e.g. ["-m", "-F"]) to pass to exiftool
|
||||
exiftool: use exiftool to write EXIF metadata directly to exported photo
|
||||
export_as_hardlink: boolean; hardlink files instead of copying them
|
||||
export_by_date: boolean; create export folder in form dest/YYYY/MM/DD
|
||||
export_db: export database instance compatible with ExportDB_ABC
|
||||
export_dir: top-level export directory for {export_dir} template
|
||||
export_edited: boolean; if True exports edited version of photo if there is one
|
||||
export_live: boolean; also export live video component if photo is a live photo; live video will have same name as photo but with .mov extension
|
||||
export_preview: export the preview image generated by Photos
|
||||
export_raw: boolean; if True exports raw image associate with the photo
|
||||
filename_template: template use to determine output file
|
||||
fileutil: file util class compatible with FileUtilABC
|
||||
ignore_date_modified: if True, sets EXIF:ModifyDate to EXIF:DateTimeOriginal even if date_modified is set
|
||||
jpeg_ext: if not None, specify the extension to use for all JPEG images on export
|
||||
jpeg_quality: float in range 0.0 <= jpeg_quality <= 1.0. A value of 1.0 specifies use best quality, a value of 0.0 specifies use maximum compression.
|
||||
keyword_template: list of strings; if provided use rendered template strings as keywords
|
||||
num_photos: int, total number of photos that will be exported
|
||||
original_name: boolean; use original filename instead of current filename
|
||||
overwrite: boolean; overwrite dest file if it already exists
|
||||
person_keyword: boolean; if True, exports person names as keywords in metadata
|
||||
photo_num: int, which number photo in total of num_photos is being exported
|
||||
preview_if_missing: bool, export preview if original is missing
|
||||
preview_suffix: str, template to use as suffix for preview images
|
||||
replace_keywords: if True, --keyword-template replaces keywords instead of adding keywords
|
||||
retry: retry up to retry # of times if there's an error
|
||||
export_dir: top-level export directory for {export_dir} template
|
||||
export_preview: export the preview image generated by Photos
|
||||
preview_suffix: str, template to use as suffix for preview images
|
||||
preview_if_missing: bool, export preview if original is missing
|
||||
photo_num: int, which number photo in total of num_photos is being exported
|
||||
num_photos: int, total number of photos that will be exported
|
||||
sidecar_drop_ext: boolean; if True, drops photo extension from sidecar name
|
||||
sidecar: list zero, 1 or 2 of ["json","xmp"] of sidecar variety to export
|
||||
skip_original_if_edited: boolean; if True does not export original if photo has been edited
|
||||
touch_file: boolean; sets file's modification time to match photo date
|
||||
use_photos_export: boolean; if True forces the use of AppleScript to export even if photo not missing
|
||||
verbose_: Callable; verbose output function
|
||||
verbose: bool; print verbose output
|
||||
_mp_verbose: Callable; print verbose output for multiprocessing
|
||||
|
||||
Returns:
|
||||
list of path(s) of exported photo or None if photo was missing
|
||||
@ -2683,8 +2895,7 @@ def export_photo(
|
||||
Raises:
|
||||
ValueError on invalid filename_template
|
||||
"""
|
||||
global VERBOSE
|
||||
VERBOSE = bool(verbose)
|
||||
verbose_ = _mp_verbose or verbose_
|
||||
|
||||
export_original = not (skip_original_if_edited and photo.hasadjustments)
|
||||
|
||||
@ -2801,11 +3012,12 @@ def export_photo(
|
||||
)
|
||||
|
||||
results += export_photo_to_directory(
|
||||
photo=photo,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
dest_path=dest_path,
|
||||
dest=dest,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited=False,
|
||||
@ -2830,7 +3042,6 @@ def export_photo(
|
||||
missing=missing_original,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo=photo,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=rendered_preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
@ -2839,9 +3050,11 @@ def export_photo(
|
||||
sidecar_flags=sidecar_flags,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photos_export=use_photos_export,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
if export_edited and photo.hasadjustments:
|
||||
@ -2913,11 +3126,12 @@ def export_photo(
|
||||
)
|
||||
|
||||
results += export_photo_to_directory(
|
||||
photo=photo,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
dest_path=dest_path,
|
||||
dest=dest,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited=True,
|
||||
@ -2942,7 +3156,6 @@ def export_photo(
|
||||
missing=missing_edited,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo=photo,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=rendered_preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
@ -2951,9 +3164,11 @@ def export_photo(
|
||||
sidecar_flags=sidecar_flags if not export_original else 0,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photos_export=use_photos_export,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
return results
|
||||
@ -3037,13 +3252,17 @@ def export_photo_to_directory(
|
||||
use_photos_export,
|
||||
use_photokit,
|
||||
verbose,
|
||||
verbose_,
|
||||
_mp_verbose=None,
|
||||
):
|
||||
"""Export photo to directory dest_path"""
|
||||
# Need to pass the verbose_ method if for multiprocessing to work
|
||||
verbose_ = _mp_verbose or verbose_
|
||||
|
||||
results = ExportResults()
|
||||
# TODO: can be updated to let export do all the missing logic
|
||||
if export_original:
|
||||
if missing and not preview_if_missing:
|
||||
if missing and not any([preview_if_missing, download_missing, use_photos_export]):
|
||||
space = " " if not verbose else ""
|
||||
verbose_(
|
||||
f"{space}Skipping missing photo {photo.original_filename} ({photo.uuid})"
|
||||
@ -3067,7 +3286,7 @@ def export_photo_to_directory(
|
||||
return results
|
||||
else:
|
||||
# exporting the edited version
|
||||
if missing and not preview_if_missing:
|
||||
if missing and not any([preview_if_missing, download_missing, use_photos_export]):
|
||||
space = " " if not verbose else ""
|
||||
verbose_(f"{space}Skipping missing edited photo for {filename}")
|
||||
results.missing.append(str(pathlib.Path(dest_path) / filename))
|
||||
@ -4112,11 +4331,13 @@ def _list_libraries(json_=False, error=True):
|
||||
default=False,
|
||||
help="Include filename of selected photos in output",
|
||||
)
|
||||
def uuid(ctx, cli_obj, filename):
|
||||
def uuid_(ctx, cli_obj, filename):
|
||||
"""Print out unique IDs (UUID) of photos selected in Photos
|
||||
|
||||
Prints outs UUIDs in form suitable for --uuid-from-file and --skip-uuid-from-file
|
||||
"""
|
||||
# Note: This is named uuid_ because multiprocessing complains about use of photo.uuid if
|
||||
# this function is also called uuid. Something weird happenign with pickling.
|
||||
for photo in photoscript.PhotosLibrary().selection:
|
||||
if filename:
|
||||
print(f"# {photo.filename}")
|
||||
|
||||
@ -117,6 +117,9 @@ class ExportDB_ABC(ABC):
|
||||
):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_connection(self):
|
||||
pass
|
||||
|
||||
class ExportDBNoOp(ExportDB_ABC):
|
||||
"""An ExportDB with NoOp methods"""
|
||||
@ -196,6 +199,8 @@ class ExportDBNoOp(ExportDB_ABC):
|
||||
):
|
||||
pass
|
||||
|
||||
def get_connection(self):
|
||||
pass
|
||||
|
||||
class ExportDB(ExportDB_ABC):
|
||||
"""Interface to sqlite3 database used to store state information for osxphotos export command"""
|
||||
@ -216,7 +221,7 @@ class ExportDB(ExportDB_ABC):
|
||||
returns None if filename not found in database
|
||||
"""
|
||||
filepath_normalized = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -234,7 +239,7 @@ class ExportDB(ExportDB_ABC):
|
||||
"""set UUID of filename to uuid in the database"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path))
|
||||
filename_normalized = self._normalize_filepath(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -254,7 +259,7 @@ class ExportDB(ExportDB_ABC):
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -272,7 +277,7 @@ class ExportDB(ExportDB_ABC):
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -311,7 +316,7 @@ class ExportDB(ExportDB_ABC):
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -329,7 +334,7 @@ class ExportDB(ExportDB_ABC):
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -362,7 +367,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_info_for_uuid(self, uuid):
|
||||
"""returns the info JSON struct for a UUID"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute("SELECT json_info FROM info WHERE uuid = ?", (uuid,))
|
||||
@ -376,7 +381,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_info_for_uuid(self, uuid, info):
|
||||
"""sets the info JSON struct for a UUID"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -390,7 +395,7 @@ class ExportDB(ExportDB_ABC):
|
||||
def get_exifdata_for_file(self, filename):
|
||||
"""returns the exifdata JSON struct for a file"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -408,7 +413,7 @@ class ExportDB(ExportDB_ABC):
|
||||
def set_exifdata_for_file(self, filename, exifdata):
|
||||
"""sets the exifdata JSON struct for a file"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -422,7 +427,7 @@ class ExportDB(ExportDB_ABC):
|
||||
def get_sidecar_for_file(self, filename):
|
||||
"""returns the sidecar data and signature for a file"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -450,7 +455,7 @@ class ExportDB(ExportDB_ABC):
|
||||
def set_sidecar_for_file(self, filename, sidecar_data, sidecar_sig):
|
||||
"""sets the sidecar data and signature for a file"""
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -463,7 +468,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_previous_uuids(self):
|
||||
"""returns list of UUIDs of previously exported photos found in export database"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
previous_uuids = []
|
||||
try:
|
||||
c = conn.cursor()
|
||||
@ -476,7 +481,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_detected_text_for_uuid(self, uuid):
|
||||
"""Get the detected_text for a uuid"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -493,7 +498,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_detected_text_for_uuid(self, uuid, text_json):
|
||||
"""Set the detected text for uuid"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@ -521,7 +526,7 @@ class ExportDB(ExportDB_ABC):
|
||||
"""sets all the data for file and uuid at once; if any value is None, does not set it"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path))
|
||||
filename_normalized = self._normalize_filepath(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
# update files table (if needed);
|
||||
@@ -577,16 +582,23 @@ class ExportDB(ExportDB_ABC):
    def close(self):
        """close the database connection"""
        try:
            self._conn.close()
            if self._conn:
                self._conn.close()
                self._conn = None
        except Error as e:
            logging.warning(e)

    def get_connection(self):
        if self._conn is None:
            self._conn = self._open_export_db(self._dbfile)
        return self._conn

    def _set_stat_for_file(self, table, filename, stats):
        filename = self._normalize_filepath_relative(filename)
        if len(stats) != 3:
            raise ValueError(f"expected 3 elements for stat, got {len(stats)}")

        conn = self._conn
        conn = self.get_connection()
        c = conn.cursor()
        c.execute(
            f"INSERT OR REPLACE INTO {table}(filepath_normalized, mode, size, mtime) VALUES (?, ?, ?, ?);",
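The reason every method now goes through get_connection() is stated in the cli.py hunk: an open sqlite connection can't be pickled, so the parent closes the export database before forking and each process lazily reopens it on first use. A minimal sketch of that lazy-reconnect pattern (LazyDB is illustrative; the real ExportDB opens the file via _open_export_db() and applies PRAGMAs):

import sqlite3


class LazyDB:
    """Minimal sketch of the lazy-reconnect pattern used by ExportDB.get_connection()."""

    def __init__(self, dbfile):
        self._dbfile = dbfile
        self._conn = None  # nothing opened until first use

    def get_connection(self):
        # reopen on demand, e.g. after close() was called before forking workers
        if self._conn is None:
            self._conn = sqlite3.connect(self._dbfile)
        return self._conn

    def close(self):
        if self._conn:
            self._conn.close()
            self._conn = None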
@ -596,7 +608,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def _get_stat_for_file(self, table, filename):
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"SELECT mode, size, mtime FROM {table} WHERE filepath_normalized = ?",
|
||||
@ -637,6 +649,14 @@ class ExportDB(ExportDB_ABC):
|
||||
else:
|
||||
self.was_upgraded = ()
|
||||
self.version = OSXPHOTOS_EXPORTDB_VERSION
|
||||
|
||||
# turn on performance optimizations
|
||||
c = conn.cursor()
|
||||
c.execute("PRAGMA journal_mode=WAL;")
|
||||
c.execute("PRAGMA synchronous=NORMAL;")
|
||||
c.execute("PRAGMA cache_size=-100000;")
|
||||
c.execute("PRAGMA temp_store=MEMORY;")
|
||||
|
||||
return conn
|
||||
|
||||
def _get_db_connection(self, dbfile):
|
||||
@ -777,7 +797,7 @@ class ExportDB(ExportDB_ABC):
|
||||
cmd = sys.argv[0]
|
||||
args = " ".join(sys.argv[1:]) if len(sys.argv) > 1 else ""
|
||||
cwd = os.getcwd()
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
|
||||
@@ -1,14 +1,17 @@
""" utility functions for validating/sanitizing path components """

import re

import pathvalidate

from ._constants import MAX_DIRNAME_LEN, MAX_FILENAME_LEN

__all__ = [
    "sanitize_filepath",
    "is_valid_filepath",
    "sanitize_filename",
    "sanitize_dirname",
    "sanitize_filename",
    "sanitize_filepath",
    "sanitize_filestem_with_count",
    "sanitize_pathpart",
]

@@ -53,6 +56,26 @@ def sanitize_filename(filename, replacement=":"):
    return filename


def sanitize_filestem_with_count(file_stem: str, file_suffix: str) -> str:
    """Sanitize a filestem that may end in (1), (2), etc. to ensure it + file_suffix doesn't exceed MAX_FILENAME_LEN"""
    filename_len = len(file_stem) + len(file_suffix)
    if filename_len <= MAX_FILENAME_LEN:
        return file_stem

    drop = filename_len - MAX_FILENAME_LEN
    match = re.match(r"(.*)(\(\d+\))$", file_stem)
    if not match:
        # filename doesn't end in (1), (2), etc.
        # truncate filename to MAX_FILENAME_LEN
        return file_stem[:-drop]

    # filename ends in (1), (2), etc.
    file_stem = match.group(1)
    file_count = match.group(2)
    file_stem = file_stem[:-drop]
    return f"{file_stem}{file_count}"


def sanitize_dirname(dirname, replacement=":"):
    """replace any illegal characters in a directory name and truncate directory name if needed

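A short hedged example of the new helper, assuming the module paths shown elsewhere in this diff (osxphotos.path_utils and osxphotos._constants): the trailing "(n)" collision counter survives truncation, and the stem plus suffix stays within the reduced MAX_FILENAME_LEN.

from osxphotos._constants import MAX_FILENAME_LEN
from osxphotos.path_utils import sanitize_filestem_with_count

# a long stem that ends in a collision counter, e.g. "xxx...x (2)"
stem = "x" * 300 + " (2)"
new_stem = sanitize_filestem_with_count(stem, ".jpeg")
assert new_stem.endswith("(2)")                       # counter preserved
assert len(new_stem) + len(".jpeg") <= MAX_FILENAME_LEN  # fits the 255 - 6 limit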
@@ -347,6 +347,34 @@ class ExportResults:
            + ")"
        )

    def asdict(self):
        """Return dict instance of class"""
        return {
            "exported": self.exported,
            "new": self.new,
            "updated": self.updated,
            "skipped": self.skipped,
            "exif_updated": self.exif_updated,
            "touched": self.touched,
            "to_touch": self.to_touch,
            "converted_to_jpeg": self.converted_to_jpeg,
            "sidecar_json_written": self.sidecar_json_written,
            "sidecar_json_skipped": self.sidecar_json_skipped,
            "sidecar_exiftool_written": self.sidecar_exiftool_written,
            "sidecar_exiftool_skipped": self.sidecar_exiftool_skipped,
            "sidecar_xmp_written": self.sidecar_xmp_written,
            "sidecar_xmp_skipped": self.sidecar_xmp_skipped,
            "missing": self.missing,
            "error": self.error,
            "exiftool_warning": self.exiftool_warning,
            "exiftool_error": self.exiftool_error,
            "deleted_files": self.deleted_files,
            "deleted_directories": self.deleted_directories,
            "exported_album": self.exported_album,
            "skipped_album": self.skipped_album,
            "missing_album": self.missing_album,
        }


class PhotoExporter:
    def __init__(self, photo: "PhotoInfo"):
@ -507,7 +535,7 @@ class PhotoExporter:
|
||||
preview_name = (
|
||||
preview_name
|
||||
if options.overwrite or options.update
|
||||
else pathlib.Path(increment_filename(preview_name))
|
||||
else pathlib.Path(increment_filename(preview_name, lock=True))
|
||||
)
|
||||
all_results += self._export_photo(
|
||||
preview_path,
|
||||
@ -520,6 +548,13 @@ class PhotoExporter:
|
||||
if options.touch_file:
|
||||
all_results += self._touch_files(all_results, options)
|
||||
|
||||
# if src was missing, there will be a lock file for dest that needs cleaning up
|
||||
try:
|
||||
lock_file = dest.parent / f".{dest.name}.lock"
|
||||
self.fileutil.unlink(lock_file)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return all_results
|
||||
|
||||
def _touch_files(
|
||||
@ -578,7 +613,9 @@ class PhotoExporter:
|
||||
# if file1.png exists and exporting file1.jpeg,
|
||||
# dest will be file1 (1).jpeg even though file1.jpeg doesn't exist to prevent sidecar collision
|
||||
if options.increment and not options.update and not options.overwrite:
|
||||
return pathlib.Path(increment_filename(dest))
|
||||
return pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# if update and file exists, need to check to see if it's the write file by checking export db
|
||||
if options.update and dest.exists() and src:
|
||||
@ -621,7 +658,9 @@ class PhotoExporter:
|
||||
break
|
||||
else:
|
||||
# increment the destination file
|
||||
dest = pathlib.Path(increment_filename(dest))
|
||||
dest = pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# either dest was updated in the if clause above or not updated at all
|
||||
return dest
|
||||
@ -815,7 +854,9 @@ class PhotoExporter:
|
||||
raise ValueError("Edited version requested but photo has no adjustments")
|
||||
|
||||
dest = self._temp_dir_path / self.photo.original_filename
|
||||
dest = pathlib.Path(increment_filename(dest))
|
||||
dest = pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# export live_photo .mov file?
|
||||
live_photo = bool(options.live_photo and self.photo.live_photo)
|
||||
@ -915,7 +956,7 @@ class PhotoExporter:
|
||||
"""Copies filepath to a temp file preserving access and modification times"""
|
||||
filepath = pathlib.Path(filepath)
|
||||
dest = self._temp_dir_path / filepath.name
|
||||
dest = increment_filename(dest)
|
||||
dest = increment_filename(dest, lock=True)
|
||||
self.fileutil.copy(filepath, dest)
|
||||
stat = os.stat(filepath)
|
||||
self.fileutil.utime(dest, (stat.st_atime, stat.st_mtime))
|
||||
@ -1080,7 +1121,9 @@ class PhotoExporter:
|
||||
# convert to a temp file before copying
|
||||
tmp_file = increment_filename(
|
||||
self._temp_dir_path
|
||||
/ f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg"
|
||||
/ f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg",
|
||||
lock=True,
|
||||
dry_run=options.dry_run,
|
||||
)
|
||||
fileutil.convert_to_jpeg(
|
||||
src, tmp_file, compression_quality=options.jpeg_quality
|
||||
@ -1111,6 +1154,20 @@ class PhotoExporter:
|
||||
info_json=self.photo.json(),
|
||||
)
|
||||
|
||||
# clean up lock files
|
||||
for file_ in set(
|
||||
converted_to_jpeg_files
|
||||
+ exported_files
|
||||
+ update_new_files
|
||||
+ update_updated_files
|
||||
):
|
||||
try:
|
||||
file_ = pathlib.Path(file_)
|
||||
lock_file = str(file_.parent / f".{file_.name}.lock")
|
||||
fileutil.unlink(lock_file)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ExportResults(
|
||||
converted_to_jpeg=converted_to_jpeg_files,
|
||||
error=exif_results.error,
|
||||
|
||||
@@ -17,13 +17,14 @@ import sys
import unicodedata
import urllib.parse
from plistlib import load as plistload
from typing import Callable, List, Union, Optional
from typing import Callable, List, Optional, Union

import CoreFoundation
import objc
from Foundation import NSFileManager, NSPredicate, NSString

from ._constants import UNICODE_FORMAT
from .path_utils import sanitize_filestem_with_count

__all__ = [
    "dd_to_dms_str",
@@ -428,7 +429,10 @@ def normalize_unicode(value):


def increment_filename_with_count(
    filepath: Union[str, pathlib.Path], count: int = 0
    filepath: Union[str, pathlib.Path],
    count: int = 0,
    lock: bool = False,
    dry_run: bool = False,
) -> str:
    """Return filename (1).ext, etc if filename.ext exists

@@ -438,6 +442,8 @@ def increment_filename_with_count(
    Args:
        filepath: str or pathlib.Path; full path, including file name
        count: int; starting increment value
        lock: bool; if True, create a lock file in form .filename.lock to prevent other processes from using the same filename
        dry_run: bool; if True, don't actually create lock file

    Returns:
        tuple of new filepath (or same if not incremented), count
@@ -449,15 +455,32 @@ def increment_filename_with_count(
    dest_files = [f.stem.lower() for f in dest_files]
    dest_new = f"{dest.stem} ({count})" if count else dest.stem
    dest_new = normalize_fs_path(dest_new)
    dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
    if lock and not dry_run:
        dest_lock = "." + dest_new + dest.suffix + ".lock"
        dest_lock = dest.parent / dest_lock
    else:
        dest_lock = pathlib.Path("")

    while dest_new.lower() in dest_files:
    while dest_new.lower() in dest_files or (
        lock and not dry_run and dest_lock.exists()
    ):
        count += 1
        dest_new = normalize_fs_path(f"{dest.stem} ({count})")
        dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
        if lock:
            dest_lock = "." + dest_new + dest.suffix + ".lock"
            dest_lock = dest.parent / dest_lock
    if lock and not dry_run:
        dest_lock.touch()
    dest = dest.parent / f"{dest_new}{dest.suffix}"

    return normalize_fs_path(str(dest)), count


def increment_filename(filepath: Union[str, pathlib.Path]) -> str:
def increment_filename(
    filepath: Union[str, pathlib.Path], lock: bool = False, dry_run: bool = False
) -> str:
    """Return filename (1).ext, etc if filename.ext exists

    If file exists in filename's parent folder with same stem as filename,
@@ -465,13 +488,17 @@ def increment_filename(filepath: Union[str, pathlib.Path]) -> str:

    Args:
        filepath: str or pathlib.Path; full path, including file name
        lock: bool; if True, creates a lock file in form .filename.lock to prevent other processes from using the same filename
        dry_run: bool; if True, don't actually create lock file

    Returns:
        new filepath (or same if not incremented)

    Note: This obviously is subject to race condition so using with caution.
    Note: This obviously is subject to race condition so using with caution but using lock=True reduces the risk of race condition (but lock files must be cleaned up)
    """
    new_filepath, _ = increment_filename_with_count(filepath)
    new_filepath, _ = increment_filename_with_count(
        filepath, lock=lock, dry_run=dry_run
    )
    return new_filepath


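A hedged usage sketch of the new lock behavior, inferred from the hunk above and assuming these helpers live in osxphotos.utils as in the current tree: with lock=True the chosen name is reserved by touching a ".name.lock" file, so a second caller skips to the next increment even before the real file exists; lock files are cleaned up by the export code elsewhere in this diff.

import pathlib
import tempfile

from osxphotos.utils import increment_filename  # assumed import path

tmp = pathlib.Path(tempfile.mkdtemp())
dest = pathlib.Path(increment_filename(tmp / "IMG_0001.jpeg", lock=True))
lock = dest.parent / f".{dest.name}.lock"
assert lock.exists()  # name reserved for this process

# a second caller sees the lock and increments, even though IMG_0001.jpeg was never written
dest2 = pathlib.Path(increment_filename(tmp / "IMG_0001.jpeg", lock=True))
assert dest2.name == "IMG_0001 (1).jpeg"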
setup.py (8 changes)
@@ -74,12 +74,11 @@ setup(
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    install_requires=[
        "Click>=8.0.1,<9.0",
        "Mako>=1.1.4,<1.2.0",
        "PyYAML>=5.4.1,<5.5.0",
        "bitmath>=1.3.3.1,<1.4.0.0",
        "bpylist2==3.0.2",
        "Click>=8.0.1,<9.0",
        "dataclasses==0.7;python_version<'3.7'",
        "Mako>=1.1.4,<1.2.0",
        "more-itertools>=8.8.0,<9.0.0",
        "objexplore>=1.5.5,<1.6.0",
        "osxmetadata>=0.99.34,<1.0.0",
@@ -87,15 +86,16 @@ setup(
        "photoscript>=0.1.4,<0.2.0",
        "ptpython>=3.0.20,<4.0.0",
        "pyobjc-core>=7.3,<9.0",
        "pyobjc-framework-AVFoundation>=7.3,<9.0",
        "pyobjc-framework-AppleScriptKit>=7.3,<9.0",
        "pyobjc-framework-AppleScriptObjC>=7.3,<9.0",
        "pyobjc-framework-AVFoundation>=7.3,<9.0",
        "pyobjc-framework-Cocoa>=7.3,<9.0",
        "pyobjc-framework-CoreServices>=7.2,<9.0",
        "pyobjc-framework-Metal>=7.3,<9.0",
        "pyobjc-framework-Photos>=7.3,<9.0",
        "pyobjc-framework-Quartz>=7.3,<9.0",
        "pyobjc-framework-Vision>=7.3,<9.0",
        "PyYAML>=5.4.1,<5.5.0",
        "rich>=10.6.0,<=11.0.0",
        "textx>=2.3.0,<3.0.0",
        "toml>=0.10.2,<0.11.0",

File diff suppressed because one or more lines are too long
@@ -1446,6 +1446,7 @@ def test_query_exif_case_insensitive(exiftag, exifvalue, uuid_expected):


def test_export():
    """Test basic export"""
    import glob
    import os
    import os.path
@@ -1462,6 +1463,24 @@ def test_export():
        files = glob.glob("*")
        assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)

def test_export_multiprocess():
    """Test basic export with --multiprocess"""
    import glob
    import os
    import os.path

    import osxphotos
    from osxphotos.cli import export

    runner = CliRunner()
    cwd = os.getcwd()
    # pylint: disable=not-context-manager
    with runner.isolated_filesystem():
        result = runner.invoke(export, [os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V", "--multiprocess", "2"])
        assert result.exit_code == 0
        files = glob.glob("*")
        assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)


def test_export_uuid_from_file():
|
||||
"""Test export with --uuid-from-file"""
|
||||
@ -4091,8 +4110,7 @@ def test_export_filename_template_long_description():
|
||||
],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
for fname in CLI_EXPORTED_FILENAME_TEMPLATE_LONG_DESCRIPTION:
|
||||
assert pathlib.Path(fname).is_file()
|
||||
assert "exported: 1" in result.output
|
||||
|
||||
|
||||
def test_export_filename_template_3():
|
||||
@ -5129,7 +5147,7 @@ def test_export_dry_run():
|
||||
in result.output
|
||||
)
|
||||
for filepath in CLI_EXPORT_FILENAMES_DRY_RUN:
|
||||
assert re.search(r"Exported.*" + f"{re.escape(filepath)}", result.output)
|
||||
assert re.search(r"Exported.*" + f"{re.escape(normalize_fs_path(filepath))}", result.output)
|
||||
assert not os.path.isfile(normalize_fs_path(filepath))
|
||||
|
||||
|
||||
|
||||
@ -140,7 +140,6 @@ def test_export_edited_exiftool(photosdb):
|
||||
got_dest = photos[0].export(
|
||||
dest, use_photos_export=True, edited=True, exiftool=True
|
||||
)
|
||||
logging.warning(got_dest)
|
||||
got_dest = got_dest[0]
|
||||
|
||||
assert os.path.isfile(got_dest)
|
||||
|
||||
@ -1,6 +1,10 @@
|
||||
""" Test path_utils.py """
|
||||
|
||||
|
||||
def test_sanitize_filename():
|
||||
"""test sanitize_filename"""
|
||||
|
||||
# subtract 6 chars from max length of 255 to account for lock file extension
|
||||
from osxphotos.path_utils import sanitize_filename
|
||||
from osxphotos._constants import MAX_FILENAME_LEN
|
||||
|
||||
@ -30,25 +34,25 @@ def test_sanitize_filename():
|
||||
filename = "foo" + "x" * 512
|
||||
new_filename = sanitize_filename(filename)
|
||||
assert len(new_filename) == MAX_FILENAME_LEN
|
||||
assert new_filename == "foo" + "x" * 252
|
||||
assert new_filename == "foo" + "x" * (252 - 6)
|
||||
|
||||
# filename too long with extension
|
||||
filename = "x" * 512 + ".jpeg"
|
||||
new_filename = sanitize_filename(filename)
|
||||
assert len(new_filename) == MAX_FILENAME_LEN
|
||||
assert new_filename == "x" * 250 + ".jpeg"
|
||||
assert new_filename == "x" * (250 - 6) + ".jpeg"
|
||||
|
||||
# more than one extension
|
||||
filename = "foo.bar" + "x" * 255 + ".foo.bar.jpeg"
|
||||
new_filename = sanitize_filename(filename)
|
||||
assert len(new_filename) == MAX_FILENAME_LEN
|
||||
assert new_filename == "foo.bar" + "x" * 243 + ".jpeg"
|
||||
assert new_filename == "foo.bar" + "x" * (243 - 6) + ".jpeg"
|
||||
|
||||
# shorter than drop count
|
||||
filename = "foo." + "x" * 256
|
||||
new_filename = sanitize_filename(filename)
|
||||
assert len(new_filename) == MAX_FILENAME_LEN
|
||||
assert new_filename == "foo." + "x" * 251
|
||||
assert new_filename == "foo." + "x" * (251 - 6)
|
||||
|
||||
|
||||
def test_sanitize_dirname():
|
||||
@ -83,6 +87,7 @@ def test_sanitize_dirname():
|
||||
assert len(new_dirname) == MAX_DIRNAME_LEN
|
||||
assert new_dirname == "foo" + "x" * 252
|
||||
|
||||
|
||||
def test_sanitize_pathpart():
|
||||
from osxphotos.path_utils import sanitize_pathpart
|
||||
from osxphotos._constants import MAX_DIRNAME_LEN
|
||||
@ -114,4 +119,3 @@ def test_sanitize_pathpart():
|
||||
new_dirname = sanitize_pathpart(dirname)
|
||||
assert len(new_dirname) == MAX_DIRNAME_LEN
|
||||
assert new_dirname == "foo" + "x" * 252
|
||||
|
||||
|
||||