Compare commits

19 commits, comparing v0.45.4...multiproce

| SHA1 |
|---|
| 5bdd52df25 |
| 3cde0b79c9 |
| e2bd262f75 |
| db26532bab |
| 7a73b9168d |
| a43bfc5a33 |
| 1d6bc4e09e |
| 3e14b718ef |
| 1ae6270561 |
| 55a601c07e |
| 7d67b81879 |
| cd02144ac3 |
| 9b247acd1c |
| 942126ea3d |
| 2b9ea11701 |
| b3d3e14ffe |
| 62ae5db9fd |
| 77a49a09a1 |
| 79dcfb38a8 |
CHANGELOG.md (33)

@@ -4,6 +4,39 @@ All notable changes to this project will be documented in this file. Dates are d

Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).

#### [v0.45.8](https://github.com/RhetTbull/osxphotos/compare/v0.45.6...v0.45.8)

> 5 February 2022

- Fixed exiftool to ignore unsupported file types, #615 [`1ae6270`](https://github.com/RhetTbull/osxphotos/commit/1ae627056113fc4655f1b24cfbbdf0efc04489e7)
- Updated tests [`55a601c`](https://github.com/RhetTbull/osxphotos/commit/55a601c07ea1384623c55d5c1d26b568df5d7823)
- Additional fix for #615 [`1d6bc4e`](https://github.com/RhetTbull/osxphotos/commit/1d6bc4e09e3c2359a21f842fadd781920606812e)

#### [v0.45.6](https://github.com/RhetTbull/osxphotos/compare/v0.45.5...v0.45.6)

> 5 February 2022

- Fix for unicode in query strings, #618 [`9b247ac`](https://github.com/RhetTbull/osxphotos/commit/9b247acd1cc4b2def59fdd18a6fb3c8eb9914f11)
- Fix for --name searching only original_filename on Photos 5+, #594 [`cd02144`](https://github.com/RhetTbull/osxphotos/commit/cd02144ac33cc1c13a20358133971c84d35b8a57)

#### [v0.45.5](https://github.com/RhetTbull/osxphotos/compare/v0.45.4...v0.45.5)

> 5 February 2022

- Fix for #561, no really, I mean it this time [`b3d3e14`](https://github.com/RhetTbull/osxphotos/commit/b3d3e14ffe41fbb22edb614b24f3985f379766a2)
- Updated docs [skip ci] [`2b9ea11`](https://github.com/RhetTbull/osxphotos/commit/2b9ea11701799af9a661a8e2af70fca97235f487)
- Updated tests for #561 [skip ci] [`77a49a0`](https://github.com/RhetTbull/osxphotos/commit/77a49a09a1bee74113a7114c543fbc25fa410ffc)

#### [v0.45.4](https://github.com/RhetTbull/osxphotos/compare/v0.45.3...v0.45.4)

> 3 February 2022

- docs: add oPromessa as a contributor for ideas, test [`#611`](https://github.com/RhetTbull/osxphotos/pull/611)
- Fix for filenames with special characters, #561, #618 [`f3063d3`](https://github.com/RhetTbull/osxphotos/commit/f3063d35be3c96342d83dbd87ddd614a2001bff4)
- Updated docs [skip ci] [`06c5bbf`](https://github.com/RhetTbull/osxphotos/commit/06c5bbfcfdf591a4a5d43f1456adaa27385fe01a)
- Added progress counter, #601 [`7ab5007`](https://github.com/RhetTbull/osxphotos/commit/7ab500740b28594dcd778140e10991f839220e9d)
- Updated known issues [skip ci] [`e32090b`](https://github.com/RhetTbull/osxphotos/commit/e32090bf39cb786171b49443f878ffdbab774420)

#### [v0.45.3](https://github.com/RhetTbull/osxphotos/compare/v0.45.2...v0.45.3)

> 29 January 2022
MANIFEST.in (12)

@@ -1,7 +1,7 @@
include README.md
include README.rst
include osxphotos/templates/*
include osxphotos/*.json
include osxphotos/*.md
include osxphotos/phototemplate.tx
include osxphotos/phototemplate.md
include osxphotos/tutorial.md
include osxphotos/queries/*
include osxphotos/queries/*
include osxphotos/templates/*
include README.md
include README.rst
@@ -1180,6 +1180,9 @@ Options:
  --save-config <config file path>
                                  Save options to file for use with --load-
                                  config. File format is TOML.
  -M, --multiprocess NUMBER_OF_PROCESSES
                                  Run export in parallel using
                                  NUMBER_OF_PROCESSES processes.  [x>=1]
  --help                          Show this message and exit.

** Export **

@@ -1725,7 +1728,7 @@ Substitution Description
{lf} A line feed: '\n', alias for {newline}
{cr} A carriage return: '\r'
{crlf} a carriage return + line feed: '\r\n'
{osxphotos_version} The osxphotos version, e.g. '0.45.4'
{osxphotos_version} The osxphotos version, e.g. '0.45.8'
{osxphotos_cmd_line} The full command line used to run osxphotos

The following substitutions may result in multiple values. Thus if specified for

@@ -3629,7 +3632,7 @@ The following template field substitutions are availabe for use the templating s
|{lf}|A line feed: '\n', alias for {newline}|
|{cr}|A carriage return: '\r'|
|{crlf}|a carriage return + line feed: '\r\n'|
|{osxphotos_version}|The osxphotos version, e.g. '0.45.4'|
|{osxphotos_version}|The osxphotos version, e.g. '0.45.8'|
|{osxphotos_cmd_line}|The full command line used to run osxphotos|
|{album}|Album(s) photo is contained in|
|{folder_album}|Folder path + album photo is contained in. e.g. 'Folder/Subfolder/Album' or just 'Album' if no enclosing folder|
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: 001a184f6f166bf8f64bf9bb56e7b73e
config: bf43bf49b725c31ce72a8823e4f8012b
tags: 645f666f9bcd5a90fca523b33c5a78b7
docs/_static/documentation_options.js (2, vendored)

@@ -1,6 +1,6 @@
var DOCUMENTATION_OPTIONS = {
    URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
    VERSION: '0.45.4',
    VERSION: '0.45.8',
    LANGUAGE: 'None',
    COLLAPSE_INDEX: false,
    BUILDER: 'html',
@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos command line interface (CLI) — osxphotos 0.45.4 documentation</title>
<title>osxphotos command line interface (CLI) — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -5,7 +5,7 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Index — osxphotos 0.45.4 documentation</title>
<title>Index — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>Welcome to osxphotos’s documentation! — osxphotos 0.45.4 documentation</title>
<title>Welcome to osxphotos’s documentation! — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos — osxphotos 0.45.4 documentation</title>
<title>osxphotos — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos package — osxphotos 0.45.4 documentation</title>
<title>osxphotos package — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -5,7 +5,7 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Search — osxphotos 0.45.4 documentation</title>
<title>Search — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
@@ -14,6 +14,7 @@ datas = [
    ("osxphotos/phototemplate.tx", "osxphotos"),
    ("osxphotos/phototemplate.md", "osxphotos"),
    ("osxphotos/tutorial.md", "osxphotos"),
    ("osxphotos/exiftool_filetypes.json", "osxphotos"),
]
package_imports = [["photoscript", ["photoscript.applescript"]]]
for package, files in package_imports:
@@ -214,7 +214,8 @@ SEARCH_CATEGORY_PHOTO_NAME = 2056


# Max filename length on MacOS
MAX_FILENAME_LEN = 255
# subtract 6 chars for the lock file extension in form: ".filename.lock"
MAX_FILENAME_LEN = 255 - 6

# Max directory name length on MacOS
MAX_DIRNAME_LEN = 255
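The six characters of headroom reserved above come from the lock-file naming scheme mentioned in the comment: a leading dot plus a `.lock` suffix added to the exported name. A minimal sketch of that arithmetic, with the exact lock-name format assumed from the comment rather than taken from the code:

```python
# Hypothetical illustration of the 6 characters of headroom: the lock file for
# "IMG_1234.jpg" is assumed (per the comment above) to be ".IMG_1234.jpg.lock",
# i.e. a leading "." plus a ".lock" suffix appended to the original name.
MACOS_NAME_MAX = 255
LOCK_OVERHEAD = len(".") + len(".lock")  # 6
MAX_FILENAME_LEN = MACOS_NAME_MAX - LOCK_OVERHEAD  # 249


def fits_with_lockfile(filename: str) -> bool:
    """True if filename and its lock file both fit within the macOS name limit."""
    lock_name = f".{filename}.lock"
    return len(filename) <= MAX_FILENAME_LEN and len(lock_name) <= MACOS_NAME_MAX


assert fits_with_lockfile("A" * 249)
assert not fits_with_lockfile("A" * 250)
```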
@@ -1,3 +1,3 @@
""" version info """

__version__ = "0.45.4"
__version__ = "0.45.8"

osxphotos/cli.py (779)

@@ -8,6 +8,7 @@ import dataclasses
import datetime
import io
import json
import multiprocessing as mp
import os
import os.path
import pathlib
@@ -27,8 +28,10 @@ import osxmetadata
import photoscript
import rich.traceback
import yaml
from more_itertools import divide
from rich import pretty, print
from rich.console import Console
from rich.progress import Progress
from rich.syntax import Syntax

import osxphotos
@@ -148,7 +151,7 @@ def verbose_(*args, **kwargs):
    """print output if verbose flag set"""
    if VERBOSE:
        styled_args = []
        timestamp = str(datetime.datetime.now()) + " -- " if VERBOSE_TIMESTAMP else ""
        timestamp = f"[{datetime.datetime.now()}] -- " if VERBOSE_TIMESTAMP else ""
        for arg in args:
            if type(arg) == str:
                arg = timestamp + arg
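The change above only affects how the optional timestamp prefix is formatted. A tiny sketch of the two output styles, using the `VERBOSE_TIMESTAMP` flag name from the hunk (everything else is illustrative):

```python
import datetime

VERBOSE_TIMESTAMP = True

# old prefix: 2022-02-05 12:00:00.000000 -- exported IMG_0001.jpg
old_prefix = str(datetime.datetime.now()) + " -- " if VERBOSE_TIMESTAMP else ""

# new prefix: [2022-02-05 12:00:00.000000] -- exported IMG_0001.jpg
new_prefix = f"[{datetime.datetime.now()}] -- " if VERBOSE_TIMESTAMP else ""

print(old_prefix + "exported IMG_0001.jpg")
print(new_prefix + "exported IMG_0001.jpg")
```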
@@ -1169,6 +1172,13 @@ def cli(ctx, db, json_, debug):
    help=("Save options to file for use with --load-config. File format is TOML."),
    type=click.Path(),
)
@click.option(
    "--multiprocess",
    "-M",
    metavar="NUMBER_OF_PROCESSES",
    help="Run export in parallel using NUMBER_OF_PROCESSES processes. ",
    type=click.IntRange(min=1),
)
@click.option(
    "--beta",
    is_flag=True,
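For readers not familiar with click: the new option above arrives in the `export` command as a `multiprocess` keyword argument, and `click.IntRange(min=1)` rejects zero or negative process counts before the command body runs. A minimal standalone sketch of the same pattern (only the option and metavar names mirror the hunk; the toy command is illustrative):

```python
import click


@click.command()
@click.option(
    "--multiprocess",
    "-M",
    metavar="NUMBER_OF_PROCESSES",
    help="Run export in parallel using NUMBER_OF_PROCESSES processes.",
    type=click.IntRange(min=1),  # values below 1 are rejected with a usage error
)
def export(multiprocess):
    """Toy command showing how the option value arrives as a keyword parameter."""
    click.echo(f"multiprocess = {multiprocess!r}")


if __name__ == "__main__":
    export()  # e.g. `python demo.py -M 4` prints "multiprocess = 4"
```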
@@ -1338,6 +1348,7 @@ def export(
    preview_if_missing,
    profile,
    profile_sort,
    multiprocess,
):
    """Export photos from the Photos database.
    Export path DEST is required.
@@ -1868,198 +1879,28 @@ def export(
    # store results of export
    results = ExportResults()

    if photos:
    if not photos:
        click.echo("Did not find any photos to export")
    else:
        num_photos = len(photos)
        # TODO: photos or photo appears several times, pull into a separate function
        photo_str = "photos" if num_photos > 1 else "photo"
        click.echo(f"Exporting {num_photos} {photo_str} to {dest}...")
        start_time = time.perf_counter()
        # though the command line option is current_name, internally all processing
        # logic uses original_name which is the boolean inverse of current_name
        # because the original code used --original-name as an option
        original_name = not current_name

        # set up for --add-export-to-album if needed
        album_export = (
            PhotosAlbum(add_exported_to_album, verbose=verbose_)
            if add_exported_to_album
            else None
        )
        album_skipped = (
            PhotosAlbum(add_skipped_to_album, verbose=verbose_)
            if add_skipped_to_album
            else None
        )
        album_missing = (
            PhotosAlbum(add_missing_to_album, verbose=verbose_)
            if add_missing_to_album
            else None
        )

        photo_num = 0
        # send progress bar output to /dev/null if verbose to hide the progress bar
        fp = open(os.devnull, "w") if verbose else None
        with click.progressbar(photos, show_pos=True, file=fp) as bar:
            for p in bar:
                photo_num += 1
                export_results = export_photo(
photo=p,
|
||||
dest=dest,
|
||||
verbose=verbose,
|
||||
export_by_date=export_by_date,
|
||||
sidecar=sidecar,
|
||||
sidecar_drop_ext=sidecar_drop_ext,
|
||||
update=update,
|
||||
ignore_signature=ignore_signature,
|
||||
export_as_hardlink=export_as_hardlink,
|
||||
overwrite=overwrite,
|
||||
export_edited=export_edited,
|
||||
skip_original_if_edited=skip_original_if_edited,
|
||||
original_name=original_name,
|
||||
export_live=export_live,
|
||||
download_missing=download_missing,
|
||||
exiftool=exiftool,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
exiftool_merge_persons=exiftool_merge_persons,
|
||||
directory=directory,
|
||||
filename_template=filename_template,
|
||||
export_raw=export_raw,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
keyword_template=keyword_template,
|
||||
description_template=description_template,
|
||||
export_db=export_db,
|
||||
fileutil=fileutil,
|
||||
dry_run=dry_run,
|
||||
touch_file=touch_file,
|
||||
edited_suffix=edited_suffix,
|
||||
original_suffix=original_suffix,
|
||||
use_photos_export=use_photos_export,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
jpeg_quality=jpeg_quality,
|
||||
ignore_date_modified=ignore_date_modified,
|
||||
use_photokit=use_photokit,
|
||||
exiftool_option=exiftool_option,
|
||||
strip=strip,
|
||||
jpeg_ext=jpeg_ext,
|
||||
replace_keywords=replace_keywords,
|
||||
retry=retry,
|
||||
export_dir=dest,
|
||||
export_preview=preview,
|
||||
preview_suffix=preview_suffix,
|
||||
preview_if_missing=preview_if_missing,
|
||||
photo_num=photo_num,
|
||||
num_photos=num_photos,
|
||||
)
|
||||
|
||||
if post_function:
|
||||
for function in post_function:
|
||||
# post function is tuple of (function, filename.py::function_name)
|
||||
verbose_(f"Calling post-function {function[1]}")
|
||||
if not dry_run:
|
||||
try:
|
||||
function[0](p, export_results, verbose_)
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error running post-function {function[1]}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
run_post_command(
|
||||
photo=p,
|
||||
post_command=post_command,
|
||||
export_results=export_results,
|
||||
export_dir=dest,
|
||||
dry_run=dry_run,
|
||||
exiftool_path=exiftool_path,
|
||||
export_db=export_db,
|
||||
)
|
||||
|
||||
if album_export and export_results.exported:
|
||||
try:
|
||||
album_export.add(p)
|
||||
export_results.exported_album = [
|
||||
(filename, album_export.name)
|
||||
for filename in export_results.exported
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_export.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_skipped and export_results.skipped:
|
||||
try:
|
||||
album_skipped.add(p)
|
||||
export_results.skipped_album = [
|
||||
(filename, album_skipped.name)
|
||||
for filename in export_results.skipped
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_skipped.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_missing and export_results.missing:
|
||||
try:
|
||||
album_missing.add(p)
|
||||
export_results.missing_album = [
|
||||
(filename, album_missing.name)
|
||||
for filename in export_results.missing
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_missing.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
results += export_results
|
||||
|
||||
# all photo files (not including sidecars) that are part of this export set
|
||||
# used below for applying Finder tags, etc.
|
||||
photo_files = set(
|
||||
export_results.exported
|
||||
+ export_results.new
|
||||
+ export_results.updated
|
||||
+ export_results.exif_updated
|
||||
+ export_results.converted_to_jpeg
|
||||
+ export_results.skipped
|
||||
)
|
||||
|
||||
if finder_tag_keywords or finder_tag_template:
|
||||
tags_written, tags_skipped = write_finder_tags(
|
||||
p,
|
||||
photo_files,
|
||||
keywords=finder_tag_keywords,
|
||||
keyword_template=keyword_template,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
finder_tag_template=finder_tag_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(tags_written)
|
||||
results.xattr_skipped.extend(tags_skipped)
|
||||
|
||||
if xattr_template:
|
||||
xattr_written, xattr_skipped = write_extended_attributes(
|
||||
p,
|
||||
photo_files,
|
||||
xattr_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(xattr_written)
|
||||
results.xattr_skipped.extend(xattr_skipped)
|
||||
|
||||
    if fp is not None:
        fp.close()
    if multiprocess:
        results = _export_photos_with_multiprocessing(
            photos, kwargs={**locals(), **globals()}
        )
    else:
        # some hackery to get the arguments for export_photos
        export_args = export_photos.__code__.co_varnames
        results = export_photos(
            **{
                k: v
                for k, v in {**locals(), **globals()}.items()
                if k in export_args
            }
        )

    photo_str_total = "photos" if len(photos) != 1 else "photo"
    if update:
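The `else` branch above forwards the command's local variables to `export_photos()` by filtering `{**locals(), **globals()}` down to names that appear in `export_photos.__code__.co_varnames`. A small self-contained sketch of that dispatch trick (the function and variable names here are illustrative, not taken from osxphotos):

```python
def export_photos_demo(dest, dry_run, verbose):
    """Stand-in for a callee with many keyword parameters."""
    return f"dest={dest} dry_run={dry_run} verbose={verbose}"


def caller():
    dest = "/tmp/export"
    dry_run = False
    verbose = True
    unrelated = object()  # not a parameter of the callee, so it gets filtered out

    # co_varnames begins with the function's parameter names; slicing by
    # co_argcount keeps only real parameters (the osxphotos code skips the
    # slice and simply relies on extra names being harmless)
    accepted = export_photos_demo.__code__.co_varnames[
        : export_photos_demo.__code__.co_argcount
    ]
    return export_photos_demo(
        **{k: v for k, v in locals().items() if k in accepted}
    )


print(caller())  # dest=/tmp/export dry_run=False verbose=True
```

Note that `locals()` is evaluated in the enclosing function because it sits in the comprehension's outermost iterable, which is what makes the pattern in the diff work.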
@@ -2082,8 +1923,6 @@ def export(
    click.echo(summary)
    stop_time = time.perf_counter()
    click.echo(f"Elapsed time: {format_sec_to_hhmmss(stop_time-start_time)}")
    else:
        click.echo("Did not find any photos to export")

    # cleanup files and do report if needed
    if cleanup:
@@ -2124,16 +1963,102 @@ def export(
    export_db.close()


def _export_with_profiler(args: Dict):
    """ "Run export with cProfile"""
def _export_photos_with_multiprocessing(photos: List, kwargs: Dict) -> ExportResults():
    """Run export using multiple processes"""
    try:
        args.pop("profile")
        num_procs = kwargs.get("multiprocess")
    except KeyError:
        pass
        raise ValueError("_export_runner called without multiprocess param")

    cProfile.runctx(
        "_export(**args)", globals=globals(), locals=locals(), sort="tottime"
    )
    # build kwargs for export_photos
    # keep only the params export_photos expects
    export_args = export_photos.__code__.co_varnames
    kwargs = {arg: value for arg, value in kwargs.items() if arg in export_args}
    for arg in ["photosdb", "photos"]:
        kwargs.pop(arg, None)
    kwargs["photos"] = None

    # can't pickle an open sqlite connection so ensure export_db is closed
    export_db = kwargs.get("export_db")
    export_db.close()

    # verbose output?
    verbose = kwargs.get("verbose", None)

    # get list of uuids to pass to export_photos
    uuids = [p.uuid for p in photos]
    uuid_chunks = [list(chunk) for chunk in divide(num_procs, uuids)]

    # create a queue to communicate with processes
    q = mp.Queue()
    processes = []
    if len(uuid_chunks) < num_procs:
        num_procs = len(uuid_chunks)
    for i in range(num_procs):
        kwargs = kwargs.copy()
        kwargs["_mp_queue"] = q
        kwargs["_mp_process_total"] = num_procs
        kwargs["_mp_process_num"] = i
        kwargs["_mp_uuids"] = uuid_chunks[i]
        if not kwargs["_mp_uuids"]:
            click.echo(f"Out of UUIDs to process, skipping process {i}")
            continue
        click.echo(f"Starting process number #{i}")
        p = mp.Process(target=export_photos, kwargs=kwargs)
        p.start()
        processes.append(p)

    class FakeProgress:
        def __init__(self):
            self.finished = False
            self.console = Console()

        def add_task(self, task, total):
            pass

        def update(self, task_id, completed):
            pass

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, exc_traceback):
            pass

    progress_class = Progress if not verbose else FakeProgress
    export_results = ExportResults()
    with progress_class() as progress:
        tasks = []
        for i, p in enumerate(processes):
            tasks.append(
                progress.add_task(
                    f"Process {i} ({len(uuid_chunks[i])} photos)...",
                    total=len(uuid_chunks[i]),
                )
            )

        while not progress.finished:
            while True:
                if not any(mp.active_children()):
                    break
                try:
                    results = q.get(timeout=0.5)
                    # print(results)
                    if results[1] == "VERBOSE":
                        progress.console.print(f"{results[0]}: {results[2]}")
                        # verbose_(f"{results[0]}: {results[2]}")
                    elif results[1] == "DONE":
                        # click.echo(f"Process {results[0]} is done")
                        export_results += ExportResults(**results[2])
                        if isinstance(progress, FakeProgress):
                            progress.finished = True
                    elif results[1] == "PROGRESS":
                        progress.update(tasks[results[0]], completed=results[2])
                except Exception:
                    pass

    click.echo("All processes finished")
    return export_results

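The new `_export_photos_with_multiprocessing` splits the photo UUIDs into one chunk per worker with `more_itertools.divide`, starts an `mp.Process` per chunk, and drains a shared `mp.Queue` whose messages are `[process_num, kind, payload]` with kinds `START`, `VERBOSE`, `PROGRESS`, and `DONE` (the workers emit them from `export_photos`, further down the diff). A stripped-down, runnable sketch of that producer/consumer shape, with a trivial worker standing in for the real export (it reuses `more_itertools`, which the diff already imports):

```python
import multiprocessing as mp

from more_itertools import divide


def worker(uuids, process_num, queue):
    """Stand-in for export_photos: reports progress and a final result payload."""
    queue.put([process_num, "START", f"{len(uuids)} uuids"])
    for n, uuid in enumerate(uuids, start=1):
        queue.put([process_num, "VERBOSE", (f"exporting {uuid}",)])
        queue.put([process_num, "PROGRESS", n])
    queue.put([process_num, "DONE", {"exported": list(uuids)}])


def run(uuids, num_procs):
    # one chunk of UUIDs per worker process, mirroring divide() in the diff
    chunks = [list(chunk) for chunk in divide(num_procs, uuids)]
    queue = mp.Queue()
    procs = []
    for i, chunk in enumerate(chunks):
        if not chunk:
            continue  # fewer photos than processes
        p = mp.Process(target=worker, args=(chunk, i, queue))
        p.start()
        procs.append(p)

    exported = []
    # drain the queue until every worker has exited and nothing is left to read
    while any(p.is_alive() for p in procs) or not queue.empty():
        try:
            process_num, kind, payload = queue.get(timeout=0.5)
        except Exception:
            continue
        if kind == "DONE":
            exported.extend(payload["exported"])
    for p in procs:
        p.join()
    return exported


if __name__ == "__main__":
    print(run([f"uuid-{n}" for n in range(10)], num_procs=3))
```

The `FakeProgress` class in the function above plays the same role for verbose runs: it satisfies the `rich.progress.Progress` interface while doing nothing, so the queue-draining loop does not need two code paths.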
@cli.command()
|
||||
@@ -2582,100 +2507,387 @@ def print_photo_info(photos, json=False):
|
||||
csv_writer.writerow(row)
|
||||
|
||||
|
||||
def export_photos(
|
||||
add_exported_to_album,
|
||||
add_missing_to_album,
|
||||
add_skipped_to_album,
|
||||
album_keyword,
|
||||
convert_to_jpeg,
|
||||
current_name,
|
||||
db,
|
||||
description_template,
|
||||
dest,
|
||||
directory,
|
||||
download_missing,
|
||||
dry_run,
|
||||
edited_suffix,
|
||||
exiftool_merge_keywords,
|
||||
exiftool_merge_persons,
|
||||
exiftool_option,
|
||||
exiftool_path,
|
||||
exiftool,
|
||||
export_as_hardlink,
|
||||
export_by_date,
|
||||
export_db,
|
||||
export_edited,
|
||||
export_live,
|
||||
export_raw,
|
||||
filename_template,
|
||||
fileutil,
|
||||
finder_tag_keywords,
|
||||
finder_tag_template,
|
||||
ignore_date_modified,
|
||||
ignore_signature,
|
||||
jpeg_ext,
|
||||
jpeg_quality,
|
||||
keyword_template,
|
||||
multiprocess,
|
||||
original_suffix,
|
||||
overwrite,
|
||||
person_keyword,
|
||||
photos,
|
||||
post_command,
|
||||
post_function,
|
||||
preview_if_missing,
|
||||
preview_suffix,
|
||||
preview,
|
||||
replace_keywords,
|
||||
retry,
|
||||
sidecar_drop_ext,
|
||||
sidecar,
|
||||
skip_original_if_edited,
|
||||
strip,
|
||||
touch_file,
|
||||
update,
|
||||
use_photokit,
|
||||
use_photos_export,
|
||||
verbose,
|
||||
verbose_,
|
||||
xattr_template,
|
||||
_mp_uuids=None,
|
||||
_mp_process_total=None,
|
||||
_mp_process_num=None,
|
||||
_mp_queue=None,
|
||||
**kwargs,
|
||||
):
|
||||
"""export photos"""
|
||||
|
||||
# Need to pass the verbose_ method if for multiprocessing to work
|
||||
_mp_verbose = None
|
||||
if multiprocess:
|
||||
_mp_queue.put(
|
||||
[
|
||||
_mp_process_num,
|
||||
"START",
|
||||
f"multiprocess mode: {_mp_process_num}, {_mp_process_total}",
|
||||
]
|
||||
)
|
||||
|
||||
def _mp_verbose(*args, **kwargs):
|
||||
_mp_queue.put([_mp_process_num, "VERBOSE", args])
|
||||
|
||||
verbose_ = _mp_verbose
|
||||
photosdb = osxphotos.PhotosDB(db, verbose=verbose_)
|
||||
verbose_(f"_mp_uuids: {len(_mp_uuids)}")
|
||||
photos = photosdb.photos_by_uuid(_mp_uuids)
|
||||
verbose_(f"photos: {len(photos)}")
|
||||
|
||||
results = ExportResults()
|
||||
num_photos = len(photos)
|
||||
# though the command line option is current_name, internally all processing
|
||||
# logic uses original_name which is the boolean inverse of current_name
|
||||
# because the original code used --original-name as an option
|
||||
original_name = not current_name
|
||||
|
||||
# set up for --add-export-to-album if needed
|
||||
album_export = (
|
||||
PhotosAlbum(add_exported_to_album, verbose=verbose_)
|
||||
if add_exported_to_album
|
||||
else None
|
||||
)
|
||||
album_skipped = (
|
||||
PhotosAlbum(add_skipped_to_album, verbose=verbose_)
|
||||
if add_skipped_to_album
|
||||
else None
|
||||
)
|
||||
album_missing = (
|
||||
PhotosAlbum(add_missing_to_album, verbose=verbose_)
|
||||
if add_missing_to_album
|
||||
else None
|
||||
)
|
||||
|
||||
photo_num = 0
|
||||
# send progress bar output to /dev/null if verbose or multiprocess to hide the progress bar
|
||||
fp = open(os.devnull, "w") if verbose or multiprocess else None
|
||||
with click.progressbar(photos, show_pos=True, file=fp) as bar:
|
||||
for p in bar:
|
||||
photo_num += 1
|
||||
if multiprocess:
|
||||
_mp_queue.put([_mp_process_num, "PROGRESS", photo_num, num_photos])
|
||||
export_results = export_photo(
|
||||
photo=p,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
directory=directory,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited_suffix=edited_suffix,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
exiftool_merge_persons=exiftool_merge_persons,
|
||||
exiftool_option=exiftool_option,
|
||||
exiftool=exiftool,
|
||||
export_as_hardlink=export_as_hardlink,
|
||||
export_by_date=export_by_date,
|
||||
export_db=export_db,
|
||||
export_dir=dest,
|
||||
export_edited=export_edited,
|
||||
export_live=export_live,
|
||||
export_preview=preview,
|
||||
export_raw=export_raw,
|
||||
filename_template=filename_template,
|
||||
fileutil=fileutil,
|
||||
ignore_date_modified=ignore_date_modified,
|
||||
ignore_signature=ignore_signature,
|
||||
jpeg_ext=jpeg_ext,
|
||||
jpeg_quality=jpeg_quality,
|
||||
keyword_template=keyword_template,
|
||||
num_photos=num_photos,
|
||||
original_name=original_name,
|
||||
original_suffix=original_suffix,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo_num=photo_num,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
retry=retry,
|
||||
sidecar_drop_ext=sidecar_drop_ext,
|
||||
sidecar=sidecar,
|
||||
skip_original_if_edited=skip_original_if_edited,
|
||||
strip=strip,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
if post_function:
|
||||
for function in post_function:
|
||||
# post function is tuple of (function, filename.py::function_name)
|
||||
verbose_(f"Calling post-function {function[1]}")
|
||||
if not dry_run:
|
||||
try:
|
||||
function[0](p, export_results, verbose_)
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error running post-function {function[1]}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
run_post_command(
|
||||
photo=p,
|
||||
post_command=post_command,
|
||||
export_results=export_results,
|
||||
export_dir=dest,
|
||||
dry_run=dry_run,
|
||||
exiftool_path=exiftool_path,
|
||||
export_db=export_db,
|
||||
)
|
||||
|
||||
if album_export and export_results.exported:
|
||||
try:
|
||||
album_export.add(p)
|
||||
export_results.exported_album = [
|
||||
(filename, album_export.name)
|
||||
for filename in export_results.exported
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_export.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_skipped and export_results.skipped:
|
||||
try:
|
||||
album_skipped.add(p)
|
||||
export_results.skipped_album = [
|
||||
(filename, album_skipped.name)
|
||||
for filename in export_results.skipped
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_skipped.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
if album_missing and export_results.missing:
|
||||
try:
|
||||
album_missing.add(p)
|
||||
export_results.missing_album = [
|
||||
(filename, album_missing.name)
|
||||
for filename in export_results.missing
|
||||
]
|
||||
except Exception as e:
|
||||
click.secho(
|
||||
f"Error adding photo {p.original_filename} ({p.uuid}) to album {album_missing.name}: {e}",
|
||||
fg=CLI_COLOR_ERROR,
|
||||
err=True,
|
||||
)
|
||||
|
||||
results += export_results
|
||||
|
||||
# all photo files (not including sidecars) that are part of this export set
|
||||
# used below for applying Finder tags, etc.
|
||||
photo_files = set(
|
||||
export_results.exported
|
||||
+ export_results.new
|
||||
+ export_results.updated
|
||||
+ export_results.exif_updated
|
||||
+ export_results.converted_to_jpeg
|
||||
+ export_results.skipped
|
||||
)
|
||||
|
||||
if finder_tag_keywords or finder_tag_template:
|
||||
tags_written, tags_skipped = write_finder_tags(
|
||||
p,
|
||||
photo_files,
|
||||
keywords=finder_tag_keywords,
|
||||
keyword_template=keyword_template,
|
||||
album_keyword=album_keyword,
|
||||
person_keyword=person_keyword,
|
||||
exiftool_merge_keywords=exiftool_merge_keywords,
|
||||
finder_tag_template=finder_tag_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(tags_written)
|
||||
results.xattr_skipped.extend(tags_skipped)
|
||||
|
||||
if xattr_template:
|
||||
xattr_written, xattr_skipped = write_extended_attributes(
|
||||
p,
|
||||
photo_files,
|
||||
xattr_template,
|
||||
strip=strip,
|
||||
export_dir=dest,
|
||||
)
|
||||
results.xattr_written.extend(xattr_written)
|
||||
results.xattr_skipped.extend(xattr_skipped)
|
||||
|
||||
if fp is not None:
|
||||
fp.close()
|
||||
|
||||
if multiprocess:
|
||||
_mp_queue.put([_mp_process_num, "DONE", results.asdict()])
|
||||
else:
|
||||
return results
|
||||
|
||||
|
||||
def export_photo(
|
||||
photo=None,
|
||||
dest=None,
|
||||
verbose=None,
|
||||
export_by_date=None,
|
||||
sidecar=None,
|
||||
sidecar_drop_ext=False,
|
||||
update=None,
|
||||
ignore_signature=None,
|
||||
export_as_hardlink=None,
|
||||
overwrite=None,
|
||||
export_edited=None,
|
||||
skip_original_if_edited=None,
|
||||
original_name=None,
|
||||
export_live=None,
|
||||
album_keyword=None,
|
||||
convert_to_jpeg=False,
|
||||
description_template=None,
|
||||
directory=None,
|
||||
download_missing=None,
|
||||
exiftool=None,
|
||||
dry_run=None,
|
||||
edited_suffix="_edited",
|
||||
exiftool_merge_keywords=False,
|
||||
exiftool_merge_persons=False,
|
||||
directory=None,
|
||||
filename_template=None,
|
||||
export_raw=None,
|
||||
album_keyword=None,
|
||||
person_keyword=None,
|
||||
keyword_template=None,
|
||||
description_template=None,
|
||||
export_db=None,
|
||||
fileutil=FileUtil,
|
||||
dry_run=None,
|
||||
touch_file=None,
|
||||
edited_suffix="_edited",
|
||||
original_suffix="",
|
||||
use_photos_export=False,
|
||||
convert_to_jpeg=False,
|
||||
jpeg_quality=1.0,
|
||||
ignore_date_modified=False,
|
||||
use_photokit=False,
|
||||
exiftool_option=None,
|
||||
strip=False,
|
||||
exiftool=None,
|
||||
export_as_hardlink=None,
|
||||
export_by_date=None,
|
||||
export_db=None,
|
||||
export_dir=None,
|
||||
export_edited=None,
|
||||
export_live=None,
|
||||
export_preview=False,
|
||||
export_raw=None,
|
||||
filename_template=None,
|
||||
fileutil=FileUtil,
|
||||
ignore_date_modified=False,
|
||||
ignore_signature=None,
|
||||
jpeg_ext=None,
|
||||
jpeg_quality=1.0,
|
||||
keyword_template=None,
|
||||
num_photos=1,
|
||||
original_name=None,
|
||||
original_suffix="",
|
||||
overwrite=None,
|
||||
person_keyword=None,
|
||||
photo_num=1,
|
||||
preview_if_missing=False,
|
||||
preview_suffix=None,
|
||||
replace_keywords=False,
|
||||
retry=0,
|
||||
export_dir=None,
|
||||
export_preview=False,
|
||||
preview_suffix=None,
|
||||
preview_if_missing=False,
|
||||
photo_num=1,
|
||||
num_photos=1,
|
||||
sidecar_drop_ext=False,
|
||||
sidecar=None,
|
||||
skip_original_if_edited=None,
|
||||
strip=False,
|
||||
touch_file=None,
|
||||
update=None,
|
||||
use_photokit=False,
|
||||
use_photos_export=False,
|
||||
verbose_=None,
|
||||
verbose=None,
|
||||
_mp_verbose=None,
|
||||
):
|
||||
"""Helper function for export that does the actual export
|
||||
|
||||
Args:
|
||||
photo: PhotoInfo object
|
||||
dest: destination path as string
|
||||
verbose: boolean; print verbose output
|
||||
export_by_date: boolean; create export folder in form dest/YYYY/MM/DD
|
||||
sidecar: list zero, 1 or 2 of ["json","xmp"] of sidecar variety to export
|
||||
sidecar_drop_ext: boolean; if True, drops photo extension from sidecar name
|
||||
export_as_hardlink: boolean; hardlink files instead of copying them
|
||||
overwrite: boolean; overwrite dest file if it already exists
|
||||
original_name: boolean; use original filename instead of current filename
|
||||
export_live: boolean; also export live video component if photo is a live photo
|
||||
live video will have same name as photo but with .mov extension
|
||||
download_missing: attempt download of missing iCloud photos
|
||||
exiftool: use exiftool to write EXIF metadata directly to exported photo
|
||||
directory: template used to determine output directory
|
||||
filename_template: template use to determine output file
|
||||
export_raw: boolean; if True exports raw image associate with the photo
|
||||
export_edited: boolean; if True exports edited version of photo if there is one
|
||||
skip_original_if_edited: boolean; if True does not export original if photo has been edited
|
||||
album_keyword: boolean; if True, exports album names as keywords in metadata
|
||||
person_keyword: boolean; if True, exports person names as keywords in metadata
|
||||
keyword_template: list of strings; if provided use rendered template strings as keywords
|
||||
description_template: string; optional template string that will be rendered for use as photo description
|
||||
export_db: export database instance compatible with ExportDB_ABC
|
||||
fileutil: file util class compatible with FileUtilABC
|
||||
dry_run: boolean; if True, doesn't actually export or update any files
|
||||
touch_file: boolean; sets file's modification time to match photo date
|
||||
use_photos_export: boolean; if True forces the use of AppleScript to export even if photo not missing
|
||||
convert_to_jpeg: boolean; if True, converts non-jpeg images to jpeg
|
||||
jpeg_quality: float in range 0.0 <= jpeg_quality <= 1.0. A value of 1.0 specifies use best quality, a value of 0.0 specifies use maximum compression.
|
||||
ignore_date_modified: if True, sets EXIF:ModifyDate to EXIF:DateTimeOriginal even if date_modified is set
|
||||
exiftool_option: optional list flags (e.g. ["-m", "-F"]) to pass to exiftool
|
||||
description_template: string; optional template string that will be rendered for use as photo description
|
||||
directory: template used to determine output directory
|
||||
download_missing: attempt download of missing iCloud photos
|
||||
dry_run: boolean; if True, doesn't actually export or update any files
|
||||
exiftool_merge_keywords: boolean; if True, merged keywords found in file's exif data (requires exiftool)
|
||||
exiftool_merge_persons: boolean; if True, merged persons found in file's exif data (requires exiftool)
|
||||
exiftool_option: optional list flags (e.g. ["-m", "-F"]) to pass to exiftool
|
||||
exiftool: use exiftool to write EXIF metadata directly to exported photo
|
||||
export_as_hardlink: boolean; hardlink files instead of copying them
|
||||
export_by_date: boolean; create export folder in form dest/YYYY/MM/DD
|
||||
export_db: export database instance compatible with ExportDB_ABC
|
||||
export_dir: top-level export directory for {export_dir} template
|
||||
export_edited: boolean; if True exports edited version of photo if there is one
|
||||
export_live: boolean; also export live video component if photo is a live photo; live video will have same name as photo but with .mov extension
|
||||
export_preview: export the preview image generated by Photos
|
||||
export_raw: boolean; if True exports raw image associate with the photo
|
||||
filename_template: template use to determine output file
|
||||
fileutil: file util class compatible with FileUtilABC
|
||||
ignore_date_modified: if True, sets EXIF:ModifyDate to EXIF:DateTimeOriginal even if date_modified is set
|
||||
jpeg_ext: if not None, specify the extension to use for all JPEG images on export
|
||||
jpeg_quality: float in range 0.0 <= jpeg_quality <= 1.0. A value of 1.0 specifies use best quality, a value of 0.0 specifies use maximum compression.
|
||||
keyword_template: list of strings; if provided use rendered template strings as keywords
|
||||
num_photos: int, total number of photos that will be exported
|
||||
original_name: boolean; use original filename instead of current filename
|
||||
overwrite: boolean; overwrite dest file if it already exists
|
||||
person_keyword: boolean; if True, exports person names as keywords in metadata
|
||||
photo_num: int, which number photo in total of num_photos is being exported
|
||||
preview_if_missing: bool, export preview if original is missing
|
||||
preview_suffix: str, template to use as suffix for preview images
|
||||
replace_keywords: if True, --keyword-template replaces keywords instead of adding keywords
|
||||
retry: retry up to retry # of times if there's an error
|
||||
export_dir: top-level export directory for {export_dir} template
|
||||
export_preview: export the preview image generated by Photos
|
||||
preview_suffix: str, template to use as suffix for preview images
|
||||
preview_if_missing: bool, export preview if original is missing
|
||||
photo_num: int, which number photo in total of num_photos is being exported
|
||||
num_photos: int, total number of photos that will be exported
|
||||
sidecar_drop_ext: boolean; if True, drops photo extension from sidecar name
|
||||
sidecar: list zero, 1 or 2 of ["json","xmp"] of sidecar variety to export
|
||||
skip_original_if_edited: boolean; if True does not export original if photo has been edited
|
||||
touch_file: boolean; sets file's modification time to match photo date
|
||||
use_photos_export: boolean; if True forces the use of AppleScript to export even if photo not missing
|
||||
verbose_: Callable; verbose output function
|
||||
verbose: bool; print verbose output
|
||||
_mp_verbose: Callable; print verbose output for multiprocessing
|
||||
|
||||
Returns:
|
||||
list of path(s) of exported photo or None if photo was missing
|
||||
@@ -2683,8 +2895,7 @@ def export_photo(
|
||||
Raises:
|
||||
ValueError on invalid filename_template
|
||||
"""
|
||||
global VERBOSE
|
||||
VERBOSE = bool(verbose)
|
||||
verbose_ = _mp_verbose or verbose_
|
||||
|
||||
export_original = not (skip_original_if_edited and photo.hasadjustments)
|
||||
|
||||
@@ -2801,11 +3012,12 @@ def export_photo(
|
||||
)
|
||||
|
||||
results += export_photo_to_directory(
|
||||
photo=photo,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
dest_path=dest_path,
|
||||
dest=dest,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited=False,
|
||||
@@ -2830,7 +3042,6 @@ def export_photo(
|
||||
missing=missing_original,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo=photo,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=rendered_preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
@@ -2839,9 +3050,11 @@ def export_photo(
|
||||
sidecar_flags=sidecar_flags,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photos_export=use_photos_export,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
if export_edited and photo.hasadjustments:
|
||||
@@ -2913,11 +3126,12 @@ def export_photo(
|
||||
)
|
||||
|
||||
results += export_photo_to_directory(
|
||||
photo=photo,
|
||||
dest=dest,
|
||||
album_keyword=album_keyword,
|
||||
convert_to_jpeg=convert_to_jpeg,
|
||||
description_template=description_template,
|
||||
dest_path=dest_path,
|
||||
dest=dest,
|
||||
download_missing=download_missing,
|
||||
dry_run=dry_run,
|
||||
edited=True,
|
||||
@@ -2942,7 +3156,6 @@ def export_photo(
|
||||
missing=missing_edited,
|
||||
overwrite=overwrite,
|
||||
person_keyword=person_keyword,
|
||||
photo=photo,
|
||||
preview_if_missing=preview_if_missing,
|
||||
preview_suffix=rendered_preview_suffix,
|
||||
replace_keywords=replace_keywords,
|
||||
@@ -2951,9 +3164,11 @@ def export_photo(
|
||||
sidecar_flags=sidecar_flags if not export_original else 0,
|
||||
touch_file=touch_file,
|
||||
update=update,
|
||||
use_photos_export=use_photos_export,
|
||||
use_photokit=use_photokit,
|
||||
use_photos_export=use_photos_export,
|
||||
verbose_=verbose_,
|
||||
verbose=verbose,
|
||||
_mp_verbose=_mp_verbose,
|
||||
)
|
||||
|
||||
return results
|
||||
@@ -3037,13 +3252,17 @@ def export_photo_to_directory(
    use_photos_export,
    use_photokit,
    verbose,
    verbose_,
    _mp_verbose=None,
):
    """Export photo to directory dest_path"""
    # Need to pass the verbose_ method for multiprocessing to work
    verbose_ = _mp_verbose or verbose_

    results = ExportResults()
    # TODO: can be updated to let export do all the missing logic
    if export_original:
        if missing and not preview_if_missing:
        if missing and not any([preview_if_missing, download_missing, use_photos_export]):
            space = " " if not verbose else ""
            verbose_(
                f"{space}Skipping missing photo {photo.original_filename} ({photo.uuid})"
@@ -3067,7 +3286,7 @@ def export_photo_to_directory(
            return results
    else:
        # exporting the edited version
        if missing and not preview_if_missing:
        if missing and not any([preview_if_missing, download_missing, use_photos_export]):
            space = " " if not verbose else ""
            verbose_(f"{space}Skipping missing edited photo for {filename}")
            results.missing.append(str(pathlib.Path(dest_path) / filename))
@@ -4112,11 +4331,13 @@ def _list_libraries(json_=False, error=True):
    default=False,
    help="Include filename of selected photos in output",
)
def uuid(ctx, cli_obj, filename):
def uuid_(ctx, cli_obj, filename):
    """Print out unique IDs (UUID) of photos selected in Photos

    Prints out UUIDs in form suitable for --uuid-from-file and --skip-uuid-from-file
    """
    # Note: This is named uuid_ because multiprocessing complains about use of photo.uuid if
    # this function is also called uuid. Something weird happening with pickling.
    for photo in photoscript.PhotosLibrary().selection:
        if filename:
            print(f"# {photo.filename}")
@@ -11,6 +11,7 @@ import html
import json
import logging
import os
import pathlib
import re
import shutil
import subprocess
@@ -19,11 +20,12 @@ from functools import lru_cache  # pylint: disable=syntax-error

__all__ = [
    "escape_str",
    "unescape_str",
    "terminate_exiftool",
    "get_exiftool_path",
    "exiftool_can_write",
    "ExifTool",
    "ExifToolCaching",
    "get_exiftool_path",
    "terminate_exiftool",
    "unescape_str",
]

# exiftool -stay_open commands outputs this EOF marker after command is run
@@ -33,6 +35,24 @@ EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
# list of exiftool processes to cleanup when exiting or when terminate is called
EXIFTOOL_PROCESSES = []

# exiftool supported file types, created by utils/exiftool_supported_types.py
EXIFTOOL_FILETYPES_JSON = "exiftool_filetypes.json"
with (pathlib.Path(__file__).parent / EXIFTOOL_FILETYPES_JSON).open("r") as f:
    EXIFTOOL_SUPPORTED_FILETYPES = json.load(f)


def exiftool_can_write(suffix: str) -> bool:
    """Return True if exiftool supports writing to a file with the given suffix, otherwise False"""
    if not suffix:
        return False
    suffix = suffix.lower()
    if suffix[0] == ".":
        suffix = suffix[1:]
    return (
        suffix in EXIFTOOL_SUPPORTED_FILETYPES
        and EXIFTOOL_SUPPORTED_FILETYPES[suffix]["write"]
    )


def escape_str(s):
    """escape string for use with exiftool -E"""
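The module-level table `EXIFTOOL_SUPPORTED_FILETYPES` is loaded once from the new `exiftool_filetypes.json`, and `exiftool_can_write()` is what the #615 fix uses to skip files exiftool cannot write. A hedged usage sketch; the per-extension results in the comments are what the function is expected to return, assuming the shipped JSON marks those types the usual way:

```python
from osxphotos.exiftool import exiftool_can_write

# Suffix handling is case-insensitive and tolerant of a leading dot.
print(exiftool_can_write(".jpeg"))  # expected True for a writable image type
print(exiftool_can_write("JPG"))    # expected True
print(exiftool_can_write(""))       # False: an empty suffix short-circuits
print(exiftool_can_write(".zip"))   # expected False: exiftool reads but does not write ZIP
```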
osxphotos/exiftool_filetypes.json (4976, new file)
@@ -10,13 +10,17 @@ import sys
from abc import ABC, abstractmethod
from io import StringIO
from sqlite3 import Error
from typing import Union

from ._constants import OSXPHOTOS_EXPORT_DB
from ._version import __version__
from .utils import normalize_fs_path

__all__ = ["ExportDB_ABC", "ExportDBNoOp", "ExportDB", "ExportDBInMemory"]

OSXPHOTOS_EXPORTDB_VERSION = "4.2"
OSXPHOTOS_EXPORTDB_VERSION = "4.3"
OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_FILEPATH = "4.3"

OSXPHOTOS_ABOUT_STRING = f"Created by osxphotos version {__version__} (https://github.com/RhetTbull/osxphotos) on {datetime.datetime.now()}"


@@ -113,6 +117,9 @@ class ExportDB_ABC(ABC):
    ):
        pass

    @abstractmethod
    def get_connection(self):
        pass

class ExportDBNoOp(ExportDB_ABC):
    """An ExportDB with NoOp methods"""
@@ -192,6 +199,8 @@ class ExportDBNoOp(ExportDB_ABC):
    ):
        pass

    def get_connection(self):
        pass

class ExportDB(ExportDB_ABC):
    """Interface to sqlite3 database used to store state information for osxphotos export command"""
@@ -211,12 +220,13 @@ class ExportDB(ExportDB_ABC):
|
||||
"""query database for filename and return UUID
|
||||
returns None if filename not found in database
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filepath_normalized = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"SELECT uuid FROM files WHERE filepath_normalized = ?", (filename,)
|
||||
"SELECT uuid FROM files WHERE filepath_normalized = ?",
|
||||
(filepath_normalized,),
|
||||
)
|
||||
results = c.fetchone()
|
||||
uuid = results[0] if results else None
|
||||
@@ -228,8 +238,8 @@ class ExportDB(ExportDB_ABC):
|
||||
def set_uuid_for_file(self, filename, uuid):
|
||||
"""set UUID of filename to uuid in the database"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path))
|
||||
filename_normalized = filename.lower()
|
||||
conn = self._conn
|
||||
filename_normalized = self._normalize_filepath(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -245,11 +255,11 @@ class ExportDB(ExportDB_ABC):
|
||||
"""set stat info for filename
|
||||
filename: filename to set the stat info for
|
||||
stat: a tuple of length 3: mode, size, mtime"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -266,8 +276,8 @@ class ExportDB(ExportDB_ABC):
|
||||
"""get stat info for filename
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -302,11 +312,11 @@ class ExportDB(ExportDB_ABC):
|
||||
"""set stat info for filename (after exiftool has updated it)
|
||||
filename: filename to set the stat info for
|
||||
stat: a tuple of length 3: mode, size, mtime"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -323,8 +333,8 @@ class ExportDB(ExportDB_ABC):
|
||||
"""get stat info for filename (after exiftool has updated it)
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -357,7 +367,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_info_for_uuid(self, uuid):
|
||||
"""returns the info JSON struct for a UUID"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute("SELECT json_info FROM info WHERE uuid = ?", (uuid,))
|
||||
@@ -371,7 +381,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_info_for_uuid(self, uuid, info):
|
||||
"""sets the info JSON struct for a UUID"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -384,8 +394,8 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_exifdata_for_file(self, filename):
|
||||
"""returns the exifdata JSON struct for a file"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -402,8 +412,8 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_exifdata_for_file(self, filename, exifdata):
|
||||
"""sets the exifdata JSON struct for a file"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -416,8 +426,8 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_sidecar_for_file(self, filename):
|
||||
"""returns the sidecar data and signature for a file"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -444,8 +454,8 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_sidecar_for_file(self, filename, sidecar_data, sidecar_sig):
|
||||
"""sets the sidecar data and signature for a file"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -458,7 +468,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_previous_uuids(self):
|
||||
"""returns list of UUIDs of previously exported photos found in export database"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
previous_uuids = []
|
||||
try:
|
||||
c = conn.cursor()
|
||||
@@ -471,7 +481,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def get_detected_text_for_uuid(self, uuid):
|
||||
"""Get the detected_text for a uuid"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -488,7 +498,7 @@ class ExportDB(ExportDB_ABC):
|
||||
|
||||
def set_detected_text_for_uuid(self, uuid, text_json):
|
||||
"""Set the detected text for uuid"""
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -515,8 +525,8 @@ class ExportDB(ExportDB_ABC):
|
||||
):
|
||||
"""sets all the data for file and uuid at once; if any value is None, does not set it"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path))
|
||||
filename_normalized = filename.lower()
|
||||
conn = self._conn
|
||||
filename_normalized = self._normalize_filepath(filename)
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
# update files table (if needed);
|
||||
@@ -572,16 +582,23 @@ class ExportDB(ExportDB_ABC):
    def close(self):
        """close the database connection"""
        try:
            self._conn.close()
            if self._conn:
                self._conn.close()
                self._conn = None
        except Error as e:
            logging.warning(e)

    def get_connection(self):
        if self._conn is None:
            self._conn = self._open_export_db(self._dbfile)
        return self._conn

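The `close()`/`get_connection()` pair above exists because an open sqlite3 connection cannot be pickled: the export database is closed before worker processes are spawned (see the "can't pickle an open sqlite connection" comment in `_export_photos_with_multiprocessing`) and lazily reopened on next use. A minimal sketch of that lazy-reconnect pattern, independent of the osxphotos classes:

```python
import sqlite3


class LazyDB:
    """Open the sqlite connection on demand so the object stays picklable while closed."""

    def __init__(self, dbfile: str):
        self._dbfile = dbfile
        self._conn = None  # no live connection yet, safe to pickle

    def get_connection(self) -> sqlite3.Connection:
        if self._conn is None:
            self._conn = sqlite3.connect(self._dbfile)
        return self._conn

    def close(self) -> None:
        if self._conn is not None:
            self._conn.close()
            self._conn = None


db = LazyDB("export.db")
db.get_connection().execute("CREATE TABLE IF NOT EXISTS files (filepath_normalized TEXT)")
db.close()           # drop the unpicklable handle before handing the object to a child process
db.get_connection()  # transparently reopened on next use
```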
def _set_stat_for_file(self, table, filename, stats):
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"INSERT OR REPLACE INTO {table}(filepath_normalized, mode, size, mtime) VALUES (?, ?, ?, ?);",
|
||||
@@ -590,8 +607,8 @@ class ExportDB(ExportDB_ABC):
|
||||
conn.commit()
|
||||
|
||||
def _get_stat_for_file(self, table, filename):
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
filename = self._normalize_filepath_relative(filename)
|
||||
conn = self.get_connection()
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"SELECT mode, size, mtime FROM {table} WHERE filepath_normalized = ?",
|
||||
@@ -626,10 +643,20 @@ class ExportDB(ExportDB_ABC):
        version_info = self._get_database_version(conn)
        if version_info[1] < OSXPHOTOS_EXPORTDB_VERSION:
            self._create_db_tables(conn)
            if version_info[1] < OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_FILEPATH:
                self._migrate_normalized_filepath(conn)
            self.was_upgraded = (version_info[1], OSXPHOTOS_EXPORTDB_VERSION)
        else:
            self.was_upgraded = ()
        self.version = OSXPHOTOS_EXPORTDB_VERSION

        # turn on performance optimizations
        c = conn.cursor()
        c.execute("PRAGMA journal_mode=WAL;")
        c.execute("PRAGMA synchronous=NORMAL;")
        c.execute("PRAGMA cache_size=-100000;")
        c.execute("PRAGMA temp_store=MEMORY;")

        return conn

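The PRAGMAs added above trade some durability guarantees for speed: WAL journaling lets readers proceed while a write is in flight, `synchronous=NORMAL` issues fewer fsyncs than the default, a negative `cache_size` is interpreted as kibibytes of page cache (so -100000 is roughly 100 MB), and temporary tables stay in memory. A small sketch applying the same settings to a fresh connection:

```python
import sqlite3


def open_tuned_connection(dbfile: str) -> sqlite3.Connection:
    """Open dbfile with the same performance PRAGMAs the export database now sets."""
    conn = sqlite3.connect(dbfile)
    c = conn.cursor()
    c.execute("PRAGMA journal_mode=WAL;")    # write-ahead log instead of a rollback journal
    c.execute("PRAGMA synchronous=NORMAL;")  # fewer fsync calls than FULL
    c.execute("PRAGMA cache_size=-100000;")  # negative value = size in KiB (~100 MB)
    c.execute("PRAGMA temp_store=MEMORY;")   # keep temp tables/indices in RAM
    return conn


conn = open_tuned_connection("export.db")
print(conn.execute("PRAGMA journal_mode;").fetchone())  # ('wal',)
```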
def _get_db_connection(self, dbfile):
|
||||
@@ -770,7 +797,7 @@ class ExportDB(ExportDB_ABC):
|
||||
cmd = sys.argv[0]
|
||||
args = " ".join(sys.argv[1:]) if len(sys.argv) > 1 else ""
|
||||
cwd = os.getcwd()
|
||||
conn = self._conn
|
||||
conn = self.get_connection()
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -782,6 +809,32 @@ class ExportDB(ExportDB_ABC):
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
    def _normalize_filepath(self, filepath: Union[str, pathlib.Path]) -> str:
        """normalize filepath for unicode, lower case"""
        return normalize_fs_path(str(filepath)).lower()

    def _normalize_filepath_relative(self, filepath: Union[str, pathlib.Path]) -> str:
        """normalize filepath for unicode, relative path (to export dir), lower case"""
        filepath = str(pathlib.Path(filepath).relative_to(self._path))
        return normalize_fs_path(str(filepath)).lower()

    def _migrate_normalized_filepath(self, conn):
        """Fix all filepath_normalized columns for unicode normalization"""
        # Prior to database version 4.3, filepath_normalized was not normalized for unicode
        c = conn.cursor()
        for table in ["converted", "edited", "exifdata", "files", "sidecar"]:
            old_values = c.execute(
                f"SELECT filepath_normalized, id FROM {table}"
            ).fetchall()
            new_values = [
                (self._normalize_filepath(filepath_normalized), id_)
                for filepath_normalized, id_ in old_values
            ]
            c.executemany(
                f"UPDATE {table} SET filepath_normalized=? WHERE id=?", new_values
            )
        conn.commit()

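The normalization matters because macOS file APIs can return the same name in decomposed (NFD) form while user input and the Photos database typically use precomposed (NFC) characters; without a common form, a name like "Frítest.jpg" can fail to match itself (#561, #618). A minimal sketch of the idea behind the normalize_fs_path helper; the exact form osxphotos uses is an implementation detail, NFC is assumed here:

import unicodedata

def normalize_path_sketch(path: str) -> str:
    # fold both spellings of "í" (precomposed U+00ED vs "i" + combining acute)
    # into one canonical form so path comparisons behave predictably
    return unicodedata.normalize("NFC", path)

assert normalize_path_sketch("Fri\u0301test.jpg") == normalize_path_sketch("Frítest.jpg")
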
class ExportDBInMemory(ExportDB):
|
||||
"""In memory version of ExportDB
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
""" utility functions for validating/sanitizing path components """
|
||||
|
||||
import re
|
||||
|
||||
import pathvalidate
|
||||
|
||||
from ._constants import MAX_DIRNAME_LEN, MAX_FILENAME_LEN
|
||||
|
||||
__all__ = [
|
||||
"sanitize_filepath",
|
||||
"is_valid_filepath",
|
||||
"sanitize_filename",
|
||||
"sanitize_dirname",
|
||||
"sanitize_filename",
|
||||
"sanitize_filepath",
|
||||
"sanitize_filestem_with_count",
|
||||
"sanitize_pathpart",
|
||||
]
|
||||
|
||||
@@ -53,6 +56,26 @@ def sanitize_filename(filename, replacement=":"):
    return filename


def sanitize_filestem_with_count(file_stem: str, file_suffix: str) -> str:
    """Sanitize a filestem that may end in (1), (2), etc. to ensure it + file_suffix doesn't exceed MAX_FILENAME_LEN"""
    filename_len = len(file_stem) + len(file_suffix)
    if filename_len <= MAX_FILENAME_LEN:
        return file_stem

    drop = filename_len - MAX_FILENAME_LEN
    match = re.match(r"(.*)(\(\d+\))$", file_stem)
    if not match:
        # filename doesn't end in (1), (2), etc.
        # truncate filename to MAX_FILENAME_LEN
        return file_stem[:-drop]

    # filename ends in (1), (2), etc.
    file_stem = match.group(1)
    file_count = match.group(2)
    file_stem = file_stem[:-drop]
    return f"{file_stem}{file_count}"

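A quick usage sketch of the truncation behavior above, assuming MAX_FILENAME_LEN is the usual macOS limit of 255: an over-long stem is cut so that stem plus suffix just fits, and a trailing uniqueness counter such as "(2)" is preserved by shortening the stem in front of it instead.

from osxphotos.path_utils import sanitize_filestem_with_count

stem = "x" * 300
print(len(sanitize_filestem_with_count(stem, ".jpg") + ".jpg"))  # 255, if MAX_FILENAME_LEN is 255

stem = "y" * 300 + " (2)"
print(sanitize_filestem_with_count(stem, ".jpg")[-3:])           # "(2)": the counter survives, the stem is shortened
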
def sanitize_dirname(dirname, replacement=":"):
|
||||
"""replace any illegal characters in a directory name and truncate directory name if needed
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
|
||||
|
||||
import dataclasses
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
@@ -33,7 +32,7 @@ from ._constants import (
|
||||
)
|
||||
from ._version import __version__
|
||||
from .datetime_utils import datetime_tz_to_utc
|
||||
from .exiftool import ExifTool
|
||||
from .exiftool import ExifTool, exiftool_can_write
|
||||
from .export_db import ExportDB_ABC, ExportDBNoOp
|
||||
from .fileutil import FileUtil
|
||||
from .photokit import (
|
||||
@@ -348,6 +347,34 @@ class ExportResults:
|
||||
+ ")"
|
||||
)
|
||||
|
||||
def asdict(self):
|
||||
"""Return dict instance of class"""
|
||||
return {
|
||||
"exported": self.exported,
|
||||
"new": self.new,
|
||||
"updated": self.updated,
|
||||
"skipped": self.skipped,
|
||||
"exif_updated": self.exif_updated,
|
||||
"touched": self.touched,
|
||||
"to_touch": self.to_touch,
|
||||
"converted_to_jpeg": self.converted_to_jpeg,
|
||||
"sidecar_json_written": self.sidecar_json_written,
|
||||
"sidecar_json_skipped": self.sidecar_json_skipped,
|
||||
"sidecar_exiftool_written": self.sidecar_exiftool_written,
|
||||
"sidecar_exiftool_skipped": self.sidecar_exiftool_skipped,
|
||||
"sidecar_xmp_written": self.sidecar_xmp_written,
|
||||
"sidecar_xmp_skipped": self.sidecar_xmp_skipped,
|
||||
"missing": self.missing,
|
||||
"error": self.error,
|
||||
"exiftool_warning": self.exiftool_warning,
|
||||
"exiftool_error": self.exiftool_error,
|
||||
"deleted_files": self.deleted_files,
|
||||
"deleted_directories": self.deleted_directories,
|
||||
"exported_album": self.exported_album,
|
||||
"skipped_album": self.skipped_album,
|
||||
"missing_album": self.missing_album,
|
||||
}
|
||||
|
||||
|
||||
class PhotoExporter:
|
||||
def __init__(self, photo: "PhotoInfo"):
|
||||
@@ -508,7 +535,7 @@ class PhotoExporter:
|
||||
preview_name = (
|
||||
preview_name
|
||||
if options.overwrite or options.update
|
||||
else pathlib.Path(increment_filename(preview_name))
|
||||
else pathlib.Path(increment_filename(preview_name, lock=True))
|
||||
)
|
||||
all_results += self._export_photo(
|
||||
preview_path,
|
||||
@@ -521,6 +548,13 @@ class PhotoExporter:
|
||||
if options.touch_file:
|
||||
all_results += self._touch_files(all_results, options)
|
||||
|
||||
# if src was missing, there will be a lock file for dest that needs cleaning up
|
||||
try:
|
||||
lock_file = dest.parent / f".{dest.name}.lock"
|
||||
self.fileutil.unlink(lock_file)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return all_results
|
||||
|
||||
def _touch_files(
|
||||
@@ -579,7 +613,9 @@ class PhotoExporter:
|
||||
# if file1.png exists and exporting file1.jpeg,
|
||||
# dest will be file1 (1).jpeg even though file1.jpeg doesn't exist to prevent sidecar collision
|
||||
if options.increment and not options.update and not options.overwrite:
|
||||
return pathlib.Path(increment_filename(dest))
|
||||
return pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# if update and file exists, need to check the export db to see if it's the right file
|
||||
if options.update and dest.exists() and src:
|
||||
@@ -622,7 +658,9 @@ class PhotoExporter:
|
||||
break
|
||||
else:
|
||||
# increment the destination file
|
||||
dest = pathlib.Path(increment_filename(dest))
|
||||
dest = pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# either dest was updated in the if clause above or not updated at all
|
||||
return dest
|
||||
@@ -816,7 +854,9 @@ class PhotoExporter:
|
||||
raise ValueError("Edited version requested but photo has no adjustments")
|
||||
|
||||
dest = self._temp_dir_path / self.photo.original_filename
|
||||
dest = pathlib.Path(increment_filename(dest))
|
||||
dest = pathlib.Path(
|
||||
increment_filename(dest, lock=True, dry_run=options.dry_run)
|
||||
)
|
||||
|
||||
# export live_photo .mov file?
|
||||
live_photo = bool(options.live_photo and self.photo.live_photo)
|
||||
@@ -916,7 +956,7 @@ class PhotoExporter:
|
||||
"""Copies filepath to a temp file preserving access and modification times"""
|
||||
filepath = pathlib.Path(filepath)
|
||||
dest = self._temp_dir_path / filepath.name
|
||||
dest = increment_filename(dest)
|
||||
dest = increment_filename(dest, lock=True)
|
||||
self.fileutil.copy(filepath, dest)
|
||||
stat = os.stat(filepath)
|
||||
self.fileutil.utime(dest, (stat.st_atime, stat.st_mtime))
|
||||
@@ -1081,7 +1121,9 @@ class PhotoExporter:
|
||||
# convert to a temp file before copying
|
||||
tmp_file = increment_filename(
|
||||
self._temp_dir_path
|
||||
/ f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg"
|
||||
/ f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg",
|
||||
lock=True,
|
||||
dry_run=options.dry_run,
|
||||
)
|
||||
fileutil.convert_to_jpeg(
|
||||
src, tmp_file, compression_quality=options.jpeg_quality
|
||||
@@ -1112,6 +1154,20 @@ class PhotoExporter:
|
||||
info_json=self.photo.json(),
|
||||
)
|
||||
|
||||
# clean up lock files
|
||||
for file_ in set(
|
||||
converted_to_jpeg_files
|
||||
+ exported_files
|
||||
+ update_new_files
|
||||
+ update_updated_files
|
||||
):
|
||||
try:
|
||||
file_ = pathlib.Path(file_)
|
||||
lock_file = str(file_.parent / f".{file_.name}.lock")
|
||||
fileutil.unlink(lock_file)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ExportResults(
|
||||
converted_to_jpeg=converted_to_jpeg_files,
|
||||
error=exif_results.error,
|
||||
@@ -1260,11 +1316,27 @@ class PhotoExporter:
|
||||
|
||||
exiftool_results = ExportResults()
|
||||
|
||||
# don't try to write if unsupported file type for exiftool
|
||||
if not exiftool_can_write(os.path.splitext(src)[-1]):
|
||||
exiftool_results.exiftool_warning.append(
|
||||
(
|
||||
dest,
|
||||
f"Unsupported file type for exiftool, skipping exiftool for {dest}",
|
||||
)
|
||||
)
|
||||
# set file signature so the file doesn't get re-exported with --update
|
||||
export_db.set_data(
|
||||
dest,
|
||||
uuid=self.photo.uuid,
|
||||
exif_stat=fileutil.file_sig(src),
|
||||
exif_json=self._exiftool_json_sidecar(options=options),
|
||||
)
|
||||
return exiftool_results
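This early return is the fix for #615: for file types exiftool cannot write, osxphotos records a warning and the file signature instead of failing, so --update will not keep re-exporting the file. A hypothetical sketch of such an extension gate; the real exiftool_can_write lives in osxphotos.exiftool, and the extension set below is an assumption for illustration only:

# Hypothetical helper, NOT the osxphotos implementation; the set is illustrative only.
EXIFTOOL_WRITABLE_EXTENSIONS = {".jpg", ".jpeg", ".heic", ".tif", ".tiff", ".png", ".dng", ".mov", ".mp4"}

def can_write_sketch(suffix: str) -> bool:
    return suffix.lower() in EXIFTOOL_WRITABLE_EXTENSIONS

print(can_write_sketch(".JPG"))  # True
print(can_write_sketch(".xyz"))  # False
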
|
||||
|
||||
# determine if we need to write the exif metadata
|
||||
# if we are not updating, we always write
|
||||
# else, need to check the database to determine if we need to write
|
||||
run_exiftool = not options.update
|
||||
current_data = "foo"
|
||||
if options.update:
|
||||
files_are_different = False
|
||||
old_data = export_db.get_exifdata_for_file(dest)
|
||||
|
||||
@@ -39,6 +39,7 @@ from .._constants import (
|
||||
_PHOTOS_5_PROJECT_ALBUM_KIND,
|
||||
_PHOTOS_5_ROOT_FOLDER_KIND,
|
||||
_PHOTOS_5_SHARED_ALBUM_KIND,
|
||||
_PHOTOS_5_VERSION,
|
||||
_TESTED_OS_VERSIONS,
|
||||
_UNKNOWN_PERSON,
|
||||
BURST_KEY,
|
||||
@@ -659,14 +660,18 @@ class PhotosDB:
|
||||
|
||||
for person in c:
|
||||
pk = person[0]
|
||||
fullname = person[2] if person[2] is not None else _UNKNOWN_PERSON
|
||||
fullname = (
|
||||
normalize_unicode(person[2])
|
||||
if person[2] is not None
|
||||
else _UNKNOWN_PERSON
|
||||
)
|
||||
self._dbpersons_pk[pk] = {
|
||||
"pk": pk,
|
||||
"uuid": person[1],
|
||||
"fullname": fullname,
|
||||
"facecount": person[3],
|
||||
"keyface": person[5],
|
||||
"displayname": person[4],
|
||||
"displayname": normalize_unicode(person[4]),
|
||||
"photo_uuid": None,
|
||||
"keyface_uuid": None,
|
||||
}
|
||||
@@ -733,13 +738,6 @@ class PhotosDB:
|
||||
except KeyError:
|
||||
self._dbfaces_pk[pk] = [uuid]
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through persons")
|
||||
logging.debug(pformat(self._dbpersons_pk))
|
||||
logging.debug(pformat(self._dbpersons_fullname))
|
||||
logging.debug(pformat(self._dbfaces_pk))
|
||||
logging.debug(pformat(self._dbfaces_uuid))
|
||||
|
||||
# Get info on albums
|
||||
verbose("Processing albums.")
|
||||
c.execute(
|
||||
@@ -876,14 +874,6 @@ class PhotosDB:
|
||||
else:
|
||||
self._dbalbum_folders[album] = {}
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through albums")
|
||||
logging.debug(pformat(self._dbalbums_album))
|
||||
logging.debug(pformat(self._dbalbums_uuid))
|
||||
logging.debug(pformat(self._dbalbum_details))
|
||||
logging.debug(pformat(self._dbalbum_folders))
|
||||
logging.debug(pformat(self._dbfolder_details))
|
||||
|
||||
# Get info on keywords
|
||||
verbose("Processing keywords.")
|
||||
c.execute(
|
||||
@@ -899,13 +889,16 @@ class PhotosDB:
|
||||
RKMaster.uuid = RKVersion.masterUuid
|
||||
"""
|
||||
)
|
||||
for keyword in c:
|
||||
if not keyword[1] in self._dbkeywords_uuid:
|
||||
self._dbkeywords_uuid[keyword[1]] = []
|
||||
if not keyword[0] in self._dbkeywords_keyword:
|
||||
self._dbkeywords_keyword[keyword[0]] = []
|
||||
self._dbkeywords_uuid[keyword[1]].append(keyword[0])
|
||||
self._dbkeywords_keyword[keyword[0]].append(keyword[1])
|
||||
for keyword_title, keyword_uuid, _ in c:
|
||||
keyword_title = normalize_unicode(keyword_title)
|
||||
try:
|
||||
self._dbkeywords_uuid[keyword_uuid].append(keyword_title)
|
||||
except KeyError:
|
||||
self._dbkeywords_uuid[keyword_uuid] = [keyword_title]
|
||||
try:
|
||||
self._dbkeywords_keyword[keyword_title].append(keyword_uuid)
|
||||
except KeyError:
|
||||
self._dbkeywords_keyword[keyword_title] = [keyword_uuid]
|
||||
|
||||
# Get info on disk volumes
|
||||
c.execute("select RKVolume.modelId, RKVolume.name from RKVolume")
|
||||
@@ -1027,13 +1020,11 @@ class PhotosDB:
|
||||
|
||||
for row in c:
|
||||
uuid = row[0]
|
||||
if _debug():
|
||||
logging.debug(f"uuid = '{uuid}, master = '{row[2]}")
|
||||
self._dbphotos[uuid] = {}
|
||||
self._dbphotos[uuid]["_uuid"] = uuid # stored here for easier debugging
|
||||
self._dbphotos[uuid]["modelID"] = row[1]
|
||||
self._dbphotos[uuid]["masterUuid"] = row[2]
|
||||
self._dbphotos[uuid]["filename"] = row[3]
|
||||
self._dbphotos[uuid]["filename"] = normalize_unicode(row[3])
|
||||
|
||||
# There are sometimes negative values for lastmodifieddate in the database
|
||||
# I don't know what these mean but they will raise exception in datetime if
|
||||
@@ -1272,13 +1263,13 @@ class PhotosDB:
|
||||
info["volumeId"] = row[1]
|
||||
info["imagePath"] = row[2]
|
||||
info["isMissing"] = row[3]
|
||||
info["originalFilename"] = row[4]
|
||||
info["originalFilename"] = normalize_unicode(row[4])
|
||||
info["UTI"] = row[5]
|
||||
info["modelID"] = row[6]
|
||||
info["fileSize"] = row[7]
|
||||
info["isTrulyRAW"] = row[8]
|
||||
info["alternateMasterUuid"] = row[9]
|
||||
info["filename"] = row[10]
|
||||
info["filename"] = normalize_unicode(row[10])
|
||||
self._dbphotos_master[uuid] = info
|
||||
|
||||
# get details needed to find path of the edited photos
|
||||
@@ -1550,39 +1541,6 @@ class PhotosDB:
|
||||
|
||||
# done processing, dump debug data if requested
|
||||
verbose("Done processing details from Photos library.")
|
||||
if _debug():
|
||||
logging.debug("Faces (_dbfaces_uuid):")
|
||||
logging.debug(pformat(self._dbfaces_uuid))
|
||||
|
||||
logging.debug("Persons (_dbpersons_pk):")
|
||||
logging.debug(pformat(self._dbpersons_pk))
|
||||
|
||||
logging.debug("Keywords by uuid (_dbkeywords_uuid):")
|
||||
logging.debug(pformat(self._dbkeywords_uuid))
|
||||
|
||||
logging.debug("Keywords by keyword (_dbkeywords_keywords):")
|
||||
logging.debug(pformat(self._dbkeywords_keyword))
|
||||
|
||||
logging.debug("Albums by uuid (_dbalbums_uuid):")
|
||||
logging.debug(pformat(self._dbalbums_uuid))
|
||||
|
||||
logging.debug("Albums by album (_dbalbums_albums):")
|
||||
logging.debug(pformat(self._dbalbums_album))
|
||||
|
||||
logging.debug("Album details (_dbalbum_details):")
|
||||
logging.debug(pformat(self._dbalbum_details))
|
||||
|
||||
logging.debug("Album titles (_dbalbum_titles):")
|
||||
logging.debug(pformat(self._dbalbum_titles))
|
||||
|
||||
logging.debug("Volumes (_dbvolumes):")
|
||||
logging.debug(pformat(self._dbvolumes))
|
||||
|
||||
logging.debug("Photos (_dbphotos):")
|
||||
logging.debug(pformat(self._dbphotos))
|
||||
|
||||
logging.debug("Burst Photos (dbphotos_burst:")
|
||||
logging.debug(pformat(self._dbphotos_burst))
|
||||
|
||||
def _build_album_folder_hierarchy_4(self, uuid, folders=None):
|
||||
"""recursively build folder/album hierarchy
|
||||
@@ -1673,7 +1631,7 @@ class PhotosDB:
|
||||
for person in c:
|
||||
pk = person[0]
|
||||
fullname = (
|
||||
person[2]
|
||||
normalize_unicode(person[2])
|
||||
if (person[2] != "" and person[2] is not None)
|
||||
else _UNKNOWN_PERSON
|
||||
)
|
||||
@@ -1683,7 +1641,7 @@ class PhotosDB:
|
||||
"fullname": fullname,
|
||||
"facecount": person[3],
|
||||
"keyface": person[4],
|
||||
"displayname": person[5],
|
||||
"displayname": normalize_unicode(person[5]),
|
||||
"photo_uuid": None,
|
||||
"keyface_uuid": None,
|
||||
}
|
||||
@@ -1747,13 +1705,6 @@ class PhotosDB:
|
||||
except KeyError:
|
||||
self._dbfaces_pk[pk] = [uuid]
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through persons")
|
||||
logging.debug(pformat(self._dbpersons_pk))
|
||||
logging.debug(pformat(self._dbpersons_fullname))
|
||||
logging.debug(pformat(self._dbfaces_pk))
|
||||
logging.debug(pformat(self._dbfaces_uuid))
|
||||
|
||||
# get details about albums
|
||||
verbose("Processing albums.")
|
||||
c.execute(
|
||||
@@ -1870,13 +1821,6 @@ class PhotosDB:
|
||||
# shared albums can't be in folders
|
||||
self._dbalbum_folders[album] = []
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through albums")
|
||||
logging.debug(pformat(self._dbalbums_album))
|
||||
logging.debug(pformat(self._dbalbums_uuid))
|
||||
logging.debug(pformat(self._dbalbum_details))
|
||||
logging.debug(pformat(self._dbalbum_folders))
|
||||
|
||||
# get details on keywords
|
||||
verbose("Processing keywords.")
|
||||
c.execute(
|
||||
@@ -1886,29 +1830,22 @@ class PhotosDB:
|
||||
JOIN Z_1KEYWORDS ON Z_1KEYWORDS.Z_1ASSETATTRIBUTES = ZADDITIONALASSETATTRIBUTES.Z_PK
|
||||
JOIN ZKEYWORD ON ZKEYWORD.Z_PK = {keyword_join} """
|
||||
)
|
||||
for keyword in c:
|
||||
keyword_title = normalize_unicode(keyword[0])
|
||||
if not keyword[1] in self._dbkeywords_uuid:
|
||||
self._dbkeywords_uuid[keyword[1]] = []
|
||||
if not keyword_title in self._dbkeywords_keyword:
|
||||
self._dbkeywords_keyword[keyword_title] = []
|
||||
self._dbkeywords_uuid[keyword[1]].append(keyword[0])
|
||||
self._dbkeywords_keyword[keyword_title].append(keyword[1])
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through keywords")
|
||||
logging.debug(pformat(self._dbkeywords_keyword))
|
||||
logging.debug(pformat(self._dbkeywords_uuid))
|
||||
for keyword_title, keyword_uuid in c:
|
||||
keyword_title = normalize_unicode(keyword_title)
|
||||
try:
|
||||
self._dbkeywords_uuid[keyword_uuid].append(keyword_title)
|
||||
except KeyError:
|
||||
self._dbkeywords_uuid[keyword_uuid] = [keyword_title]
|
||||
try:
|
||||
self._dbkeywords_keyword[keyword_title].append(keyword_uuid)
|
||||
except KeyError:
|
||||
self._dbkeywords_keyword[keyword_title] = [keyword_uuid]
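The rewritten loop normalizes each keyword title and then grows the per-uuid and per-keyword lists with try/except KeyError; the same shape can be written with collections.defaultdict, shown here only as an equivalent sketch with stand-in data (not the Photos schema), assuming normalize_unicode folds to NFC:

from collections import defaultdict
import unicodedata

keywords_by_uuid = defaultdict(list)
keywords_by_keyword = defaultdict(list)

for keyword_title, keyword_uuid in [("A\u0301gua", "uuid-1"), ("Água", "uuid-2")]:
    keyword_title = unicodedata.normalize("NFC", keyword_title)  # stand-in for normalize_unicode
    keywords_by_uuid[keyword_uuid].append(keyword_title)
    keywords_by_keyword[keyword_title].append(keyword_uuid)

print(keywords_by_keyword["Água"])  # ['uuid-1', 'uuid-2']: both spellings collapse to one key
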
|
||||
|
||||
# get details on disk volumes
|
||||
c.execute("SELECT ZUUID, ZNAME from ZFILESYSTEMVOLUME")
|
||||
for vol in c:
|
||||
self._dbvolumes[vol[0]] = vol[1]
|
||||
|
||||
if _debug():
|
||||
logging.debug(f"Finished walking through volumes")
|
||||
logging.debug(self._dbvolumes)
|
||||
|
||||
# get details about photos
|
||||
verbose("Processing photo details.")
|
||||
c.execute(
|
||||
@@ -2042,8 +1979,8 @@ class PhotosDB:
|
||||
|
||||
info["hidden"] = row[9]
|
||||
info["favorite"] = row[10]
|
||||
info["originalFilename"] = row[3]
|
||||
info["filename"] = row[12]
|
||||
info["originalFilename"] = normalize_unicode(row[3])
|
||||
info["filename"] = normalize_unicode(row[12])
|
||||
info["directory"] = row[11]
|
||||
|
||||
# set latitude and longitude
|
||||
@@ -2521,48 +2458,6 @@ class PhotosDB:
|
||||
|
||||
# done processing, dump debug data if requested
|
||||
verbose("Done processing details from Photos library.")
|
||||
if _debug():
|
||||
logging.debug("Faces (_dbfaces_uuid):")
|
||||
logging.debug(pformat(self._dbfaces_uuid))
|
||||
|
||||
logging.debug("Persons (_dbpersons_pk):")
|
||||
logging.debug(pformat(self._dbpersons_pk))
|
||||
|
||||
logging.debug("Keywords by uuid (_dbkeywords_uuid):")
|
||||
logging.debug(pformat(self._dbkeywords_uuid))
|
||||
|
||||
logging.debug("Keywords by keyword (_dbkeywords_keywords):")
|
||||
logging.debug(pformat(self._dbkeywords_keyword))
|
||||
|
||||
logging.debug("Albums by uuid (_dbalbums_uuid):")
|
||||
logging.debug(pformat(self._dbalbums_uuid))
|
||||
|
||||
logging.debug("Albums by album (_dbalbums_albums):")
|
||||
logging.debug(pformat(self._dbalbums_album))
|
||||
|
||||
logging.debug("Album details (_dbalbum_details):")
|
||||
logging.debug(pformat(self._dbalbum_details))
|
||||
|
||||
logging.debug("Album titles (_dbalbum_titles):")
|
||||
logging.debug(pformat(self._dbalbum_titles))
|
||||
|
||||
logging.debug("Album folders (_dbalbum_folders):")
|
||||
logging.debug(pformat(self._dbalbum_folders))
|
||||
|
||||
logging.debug("Album parent folders (_dbalbum_parent_folders):")
|
||||
logging.debug(pformat(self._dbalbum_parent_folders))
|
||||
|
||||
logging.debug("Albums pk (_dbalbums_pk):")
|
||||
logging.debug(pformat(self._dbalbums_pk))
|
||||
|
||||
logging.debug("Volumes (_dbvolumes):")
|
||||
logging.debug(pformat(self._dbvolumes))
|
||||
|
||||
logging.debug("Photos (_dbphotos):")
|
||||
logging.debug(pformat(self._dbphotos))
|
||||
|
||||
logging.debug("Burst Photos (dbphotos_burst:")
|
||||
logging.debug(pformat(self._dbphotos_burst))
|
||||
|
||||
def _process_moments(self):
|
||||
"""Process data from ZMOMENT table"""
|
||||
@@ -2623,8 +2518,8 @@ class PhotosDB:
|
||||
moment_info["modificationDate"] = row[6]
|
||||
moment_info["representativeDate"] = row[7]
|
||||
moment_info["startDate"] = row[8]
|
||||
moment_info["subtitle"] = row[9]
|
||||
moment_info["title"] = row[10]
|
||||
moment_info["subtitle"] = normalize_unicode(row[9])
|
||||
moment_info["title"] = normalize_unicode(row[10])
|
||||
moment_info["uuid"] = row[11]
|
||||
|
||||
# if both lat/lon == -180, then it means location undefined
|
||||
@@ -3027,6 +2922,7 @@ class PhotosDB:
|
||||
if keywords:
|
||||
keyword_set = set()
|
||||
for keyword in keywords:
|
||||
keyword = normalize_unicode(keyword)
|
||||
if keyword in self._dbkeywords_keyword:
|
||||
keyword_set.update(self._dbkeywords_keyword[keyword])
|
||||
photos_sets.append(keyword_set)
|
||||
@@ -3034,6 +2930,7 @@ class PhotosDB:
|
||||
if persons:
|
||||
person_set = set()
|
||||
for person in persons:
|
||||
person = normalize_unicode(person)
|
||||
if person in self._dbpersons_fullname:
|
||||
for pk in self._dbpersons_fullname[person]:
|
||||
try:
|
||||
@@ -3076,8 +2973,6 @@ class PhotosDB:
|
||||
):
|
||||
info = PhotoInfo(db=self, uuid=p, info=self._dbphotos[p])
|
||||
photoinfo.append(info)
|
||||
if _debug:
|
||||
logging.debug(f"photoinfo: {pformat(photoinfo)}")
|
||||
|
||||
return photoinfo
|
||||
|
||||
@@ -3414,23 +3309,35 @@ class PhotosDB:
|
||||
# case-insensitive
|
||||
for n in name:
|
||||
n = n.lower()
|
||||
photo_list.extend(
|
||||
[
|
||||
p
|
||||
for p in photos
|
||||
if n in p.filename.lower()
|
||||
or n in p.original_filename.lower()
|
||||
]
|
||||
)
|
||||
if self._db_version >= _PHOTOS_5_VERSION:
|
||||
# search only original_filename (#594)
|
||||
photo_list.extend(
|
||||
[p for p in photos if n in p.original_filename.lower()]
|
||||
)
|
||||
else:
|
||||
photo_list.extend(
|
||||
[
|
||||
p
|
||||
for p in photos
|
||||
if n in p.filename.lower()
|
||||
or n in p.original_filename.lower()
|
||||
]
|
||||
)
|
||||
else:
|
||||
for n in name:
|
||||
photo_list.extend(
|
||||
[
|
||||
p
|
||||
for p in photos
|
||||
if n in p.filename or n in p.original_filename
|
||||
]
|
||||
)
|
||||
if self._db_version >= _PHOTOS_5_VERSION:
|
||||
# search only original_filename (#594)
|
||||
photo_list.extend(
|
||||
[p for p in photos if n in p.original_filename]
|
||||
)
|
||||
else:
|
||||
photo_list.extend(
|
||||
[
|
||||
p
|
||||
for p in photos
|
||||
if n in p.filename or n in p.original_filename
|
||||
]
|
||||
)
|
||||
photos = photo_list
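On Photos 5+ the change above (for #594) matches --name against original_filename only, since the library's current filename is a UUID-based name a user would never type. A tiny illustration with stand-in objects, not the osxphotos API:

from types import SimpleNamespace

photos = [
    SimpleNamespace(
        filename="1793FAAB-DE75-4E25-886C-2BD66C780D6A.jpeg",  # on-disk name in the library
        original_filename="Frítest.jpg",                       # name the user knows
    )
]
n = "frítest"
matches = [p for p in photos if n in p.original_filename.lower()]
print(len(matches))  # 1; matching against p.filename alone would have found nothing
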
|
||||
|
||||
if options.min_size:
|
||||
|
||||
@@ -17,13 +17,14 @@ import sys
|
||||
import unicodedata
|
||||
import urllib.parse
|
||||
from plistlib import load as plistload
|
||||
from typing import Callable, List, Union, Optional
|
||||
from typing import Callable, List, Optional, Union
|
||||
|
||||
import CoreFoundation
|
||||
import objc
|
||||
from Foundation import NSFileManager, NSPredicate, NSString
|
||||
|
||||
from ._constants import UNICODE_FORMAT
|
||||
from .path_utils import sanitize_filestem_with_count
|
||||
|
||||
__all__ = [
|
||||
"dd_to_dms_str",
|
||||
@@ -428,7 +429,10 @@ def normalize_unicode(value):
|
||||
|
||||
|
||||
def increment_filename_with_count(
|
||||
filepath: Union[str, pathlib.Path], count: int = 0
|
||||
filepath: Union[str, pathlib.Path],
|
||||
count: int = 0,
|
||||
lock: bool = False,
|
||||
dry_run: bool = False,
|
||||
) -> str:
|
||||
"""Return filename (1).ext, etc if filename.ext exists
|
||||
|
||||
@@ -438,6 +442,8 @@ def increment_filename_with_count(
|
||||
Args:
|
||||
filepath: str or pathlib.Path; full path, including file name
|
||||
count: int; starting increment value
|
||||
lock: bool; if True, create a lock file in form .filename.lock to prevent other processes from using the same filename
|
||||
dry_run: bool; if True, don't actually create lock file
|
||||
|
||||
Returns:
|
||||
tuple of new filepath (or same if not incremented), count
|
||||
@@ -449,15 +455,32 @@ def increment_filename_with_count(
|
||||
dest_files = [f.stem.lower() for f in dest_files]
|
||||
dest_new = f"{dest.stem} ({count})" if count else dest.stem
|
||||
dest_new = normalize_fs_path(dest_new)
|
||||
dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
|
||||
if lock and not dry_run:
|
||||
dest_lock = "." + dest_new + dest.suffix + ".lock"
|
||||
dest_lock = dest.parent / dest_lock
|
||||
else:
|
||||
dest_lock = pathlib.Path("")
|
||||
|
||||
while dest_new.lower() in dest_files:
|
||||
while dest_new.lower() in dest_files or (
|
||||
lock and not dry_run and dest_lock.exists()
|
||||
):
|
||||
count += 1
|
||||
dest_new = normalize_fs_path(f"{dest.stem} ({count})")
|
||||
dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
|
||||
if lock:
|
||||
dest_lock = "." + dest_new + dest.suffix + ".lock"
|
||||
dest_lock = dest.parent / dest_lock
|
||||
if lock and not dry_run:
|
||||
dest_lock.touch()
|
||||
dest = dest.parent / f"{dest_new}{dest.suffix}"
|
||||
|
||||
return normalize_fs_path(str(dest)), count
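With lock=True, each candidate name is reserved by touching a hidden .name.ext.lock marker, so two export processes (the point of this multiprocess branch) cannot pick the same destination; callers are expected to delete the markers afterwards, as the lock-cleanup loops in the exporter do. A simplified sketch of the reservation idea, using a hypothetical helper rather than the library function:

import pathlib

def claim_filename(dest: pathlib.Path) -> pathlib.Path:
    """Return the first free 'name.ext', 'name (1).ext', ... and reserve it with a lock file."""
    count = 0
    while True:
        candidate = dest if count == 0 else dest.with_name(f"{dest.stem} ({count}){dest.suffix}")
        lock = candidate.parent / f".{candidate.name}.lock"
        if not candidate.exists() and not lock.exists():
            lock.touch()  # reserve the name; the caller must unlink the lock when the export is done
            return candidate
        count += 1
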
|
||||
|
||||
|
||||
def increment_filename(filepath: Union[str, pathlib.Path]) -> str:
|
||||
def increment_filename(
|
||||
filepath: Union[str, pathlib.Path], lock: bool = False, dry_run: bool = False
|
||||
) -> str:
|
||||
"""Return filename (1).ext, etc if filename.ext exists
|
||||
|
||||
If file exists in filename's parent folder with same stem as filename,
|
||||
@@ -465,13 +488,17 @@ def increment_filename(filepath: Union[str, pathlib.Path]) -> str:
|
||||
|
||||
Args:
|
||||
filepath: str or pathlib.Path; full path, including file name
|
||||
lock: bool; if True, creates a lock file in form .filename.lock to prevent other processes from using the same filename
|
||||
dry_run: bool; if True, don't actually create lock file
|
||||
|
||||
Returns:
|
||||
new filepath (or same if not incremented)
|
||||
|
||||
Note: This is obviously subject to a race condition, so use with caution.
Note: This is obviously subject to a race condition, so use with caution; lock=True reduces the risk (but lock files must be cleaned up).
|
||||
"""
|
||||
new_filepath, _ = increment_filename_with_count(filepath)
|
||||
new_filepath, _ = increment_filename_with_count(
|
||||
filepath, lock=lock, dry_run=dry_run
|
||||
)
|
||||
return new_filepath
|
||||
|
||||
|
||||
setup.py (8 changes)
@@ -74,12 +74,11 @@ setup(
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
],
|
||||
install_requires=[
|
||||
"Click>=8.0.1,<9.0",
|
||||
"Mako>=1.1.4,<1.2.0",
|
||||
"PyYAML>=5.4.1,<5.5.0",
|
||||
"bitmath>=1.3.3.1,<1.4.0.0",
|
||||
"bpylist2==3.0.2",
|
||||
"Click>=8.0.1,<9.0",
|
||||
"dataclasses==0.7;python_version<'3.7'",
|
||||
"Mako>=1.1.4,<1.2.0",
|
||||
"more-itertools>=8.8.0,<9.0.0",
|
||||
"objexplore>=1.5.5,<1.6.0",
|
||||
"osxmetadata>=0.99.34,<1.0.0",
|
||||
@@ -87,15 +86,16 @@ setup(
|
||||
"photoscript>=0.1.4,<0.2.0",
|
||||
"ptpython>=3.0.20,<4.0.0",
|
||||
"pyobjc-core>=7.3,<9.0",
|
||||
"pyobjc-framework-AVFoundation>=7.3,<9.0",
|
||||
"pyobjc-framework-AppleScriptKit>=7.3,<9.0",
|
||||
"pyobjc-framework-AppleScriptObjC>=7.3,<9.0",
|
||||
"pyobjc-framework-AVFoundation>=7.3,<9.0",
|
||||
"pyobjc-framework-Cocoa>=7.3,<9.0",
|
||||
"pyobjc-framework-CoreServices>=7.2,<9.0",
|
||||
"pyobjc-framework-Metal>=7.3,<9.0",
|
||||
"pyobjc-framework-Photos>=7.3,<9.0",
|
||||
"pyobjc-framework-Quartz>=7.3,<9.0",
|
||||
"pyobjc-framework-Vision>=7.3,<9.0",
|
||||
"PyYAML>=5.4.1,<5.5.0",
|
||||
"rich>=10.6.0,<=11.0.0",
|
||||
"textx>=2.3.0,<3.0.0",
|
||||
"toml>=0.10.2,<0.11.0",
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<key>hostuuid</key>
|
||||
<string>585B80BF-8D1F-55EF-A9E8-6CF4E5523959</string>
|
||||
<key>pid</key>
|
||||
<integer>1961</integer>
|
||||
<integer>14817</integer>
|
||||
<key>processname</key>
|
||||
<string>photolibraryd</string>
|
||||
<key>uid</key>
|
||||
|
||||
[binary image changes in the test library (image diff placeholders): 2.1 MiB, 2.8 MiB, 2.3 MiB, 2.8 MiB]
@@ -3,24 +3,24 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>BackgroundHighlightCollection</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
<key>BackgroundHighlightEnrichment</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:39Z</date>
|
||||
<key>BackgroundJobAssetRevGeocode</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
<key>BackgroundJobSearch</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
<key>BackgroundPeopleSuggestion</key>
|
||||
<date>2021-09-14T04:40:41Z</date>
|
||||
<date>2022-02-04T13:51:39Z</date>
|
||||
<key>BackgroundUserBehaviorProcessor</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundGraphConsistencyUpdateJobDateKey</key>
|
||||
<date>2021-07-20T05:48:08Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundGraphRebuildJobDate</key>
|
||||
<date>2021-07-20T05:47:59Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundMemoryGenerationJobDate</key>
|
||||
<date>2021-09-14T04:40:43Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
<key>SiriPortraitDonation</key>
|
||||
<date>2021-09-14T04:40:42Z</date>
|
||||
<date>2022-02-04T13:51:40Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
[binary image changes in the test library (image diff placeholders): 191 KiB, 123 KiB, 178 KiB, 123 KiB, 58 KiB, 32 KiB, 54 KiB, 32 KiB]
@@ -21,6 +21,7 @@ FOLDER_ALBUM_DICT = {
|
||||
ALBUM_NAMES = [
|
||||
"2018-10 - Sponsion, Museum, Frühstück, Römermuseum",
|
||||
"2019-10/11 Paris Clermont",
|
||||
"Água",
|
||||
"AlbumInFolder",
|
||||
"EmptyAlbum",
|
||||
"I have a deleted twin",
|
||||
@@ -38,6 +39,7 @@ ALBUM_NAMES = [
|
||||
ALBUM_PARENT_DICT = {
|
||||
"2018-10 - Sponsion, Museum, Frühstück, Römermuseum": None,
|
||||
"2019-10/11 Paris Clermont": None,
|
||||
"Água": None,
|
||||
"AlbumInFolder": "SubFolder2",
|
||||
"EmptyAlbum": None,
|
||||
"I have a deleted twin": None,
|
||||
@@ -54,6 +56,7 @@ ALBUM_PARENT_DICT = {
|
||||
ALBUM_FOLDER_NAMES_DICT = {
|
||||
"2018-10 - Sponsion, Museum, Frühstück, Römermuseum": [],
|
||||
"2019-10/11 Paris Clermont": [],
|
||||
"Água": [],
|
||||
"AlbumInFolder": ["Folder1", "SubFolder2"],
|
||||
"EmptyAlbum": [],
|
||||
"I have a deleted twin": [],
|
||||
@@ -70,6 +73,7 @@ ALBUM_FOLDER_NAMES_DICT = {
|
||||
ALBUM_LEN_DICT = {
|
||||
"2018-10 - Sponsion, Museum, Frühstück, Römermuseum": 1,
|
||||
"2019-10/11 Paris Clermont": 1,
|
||||
"Água": 3,
|
||||
"AlbumInFolder": 2,
|
||||
"EmptyAlbum": 0,
|
||||
"I have a deleted twin": 1,
|
||||
@@ -103,6 +107,11 @@ ALBUM_PHOTO_UUID_DICT = {
|
||||
"4D521201-92AC-43E5-8F7C-59BC41C37A96",
|
||||
"8E1D7BC9-9321-44F9-8CFB-4083F6B9232A",
|
||||
],
|
||||
"Água": [
|
||||
"7FD37B5F-6FAA-4DB1-8A29-BF9C37E38091",
|
||||
"2DFD33F1-A5D8-486F-A3A9-98C07995535A",
|
||||
"54E76FCB-D353-4557-9997-0A457BCB4D48",
|
||||
],
|
||||
}
|
||||
|
||||
UUID_DICT = {
|
||||
|
||||
@@ -24,10 +24,10 @@ PHOTOS_DB = "tests/Test-10.15.7.photoslibrary/database/photos.db"
|
||||
PHOTOS_DB_PATH = "/Test-10.15.7.photoslibrary/database/photos.db"
|
||||
PHOTOS_LIBRARY_PATH = "/Test-10.15.7.photoslibrary"
|
||||
|
||||
PHOTOS_DB_LEN = 25
|
||||
PHOTOS_NOT_IN_TRASH_LEN = 23
|
||||
PHOTOS_DB_LEN = 29
|
||||
PHOTOS_NOT_IN_TRASH_LEN = 27
|
||||
PHOTOS_IN_TRASH_LEN = 2
|
||||
PHOTOS_DB_IMPORT_SESSIONS = 17
|
||||
PHOTOS_DB_IMPORT_SESSIONS = 21
|
||||
|
||||
KEYWORDS = [
|
||||
"Kids",
|
||||
@@ -72,6 +72,7 @@ ALBUMS = [
|
||||
"Sorted Oldest First",
|
||||
"Sorted Title",
|
||||
"Test Album", # there are 2 albums named "Test Album" for testing duplicate album names
|
||||
"Água",
|
||||
]
|
||||
KEYWORDS_DICT = {
|
||||
"Drink": 2,
|
||||
@@ -115,6 +116,7 @@ ALBUM_DICT = {
|
||||
"Sorted Oldest First": 3,
|
||||
"Sorted Title": 3,
|
||||
"Test Album": 2,
|
||||
"Água": 3,
|
||||
} # Note: there are 2 albums named "Test Album" for testing duplicate album names
|
||||
|
||||
UUID_DICT = {
|
||||
@@ -1091,7 +1093,7 @@ def test_from_to_date(photosdb):
|
||||
time.tzset()
|
||||
|
||||
photos = photosdb.photos(from_date=datetime.datetime(2018, 10, 28))
|
||||
assert len(photos) == 16
|
||||
assert len(photos) == 20
|
||||
|
||||
photos = photosdb.photos(to_date=datetime.datetime(2018, 10, 28))
|
||||
assert len(photos) == 7
|
||||
|
||||
@@ -8,6 +8,7 @@ from click.testing import CliRunner
|
||||
|
||||
import osxphotos
|
||||
from osxphotos.exiftool import get_exiftool_path
|
||||
from osxphotos.utils import normalize_unicode
|
||||
|
||||
CLI_PHOTOS_DB = "tests/Test-10.15.7.photoslibrary"
|
||||
LIVE_PHOTOS_DB = "tests/Test-Cloud-10.15.1.photoslibrary"
|
||||
@@ -79,64 +80,69 @@ CLI_OUTPUT_NO_SUBCOMMAND = [
|
||||
CLI_OUTPUT_QUERY_UUID = '[{"uuid": "D79B8D77-BFFC-460B-9312-034F2877D35B", "filename": "D79B8D77-BFFC-460B-9312-034F2877D35B.jpeg", "original_filename": "Pumkins2.jpg", "date": "2018-09-28T16:07:07-04:00", "description": "Girl holding pumpkin", "title": "I found one!", "keywords": ["Kids"], "albums": ["Pumpkin Farm", "Test Album", "Multi Keyword"], "persons": ["Katie"], "path": "/tests/Test-10.15.7.photoslibrary/originals/D/D79B8D77-BFFC-460B-9312-034F2877D35B.jpeg", "ismissing": false, "hasadjustments": false, "external_edit": false, "favorite": false, "hidden": false, "latitude": 41.256566, "longitude": -95.940257, "path_edited": null, "shared": false, "isphoto": true, "ismovie": false, "uti": "public.jpeg", "burst": false, "live_photo": false, "path_live_photo": null, "iscloudasset": false, "incloud": null}]'
|
||||
|
||||
CLI_EXPORT_FILENAMES = [
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.dng",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092.heic",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Tulips_edited.jpeg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest (2).jpg",
|
||||
"Frítest (3).jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092.heic",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
|
||||
CLI_EXPORT_FILENAMES_DRY_RUN = [
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.dng",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092.heic",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092.heic",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Tulips_edited.jpeg",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_IGNORE_SIGNATURE_FILENAMES = ["Tulips.jpg", "wedding.jpg"]
|
||||
@@ -154,225 +160,253 @@ CLI_EXPORT_ORIGINAL_SUFFIX_TEMPLATE = "{edited?_original,}"
|
||||
CLI_EXPORT_PREVIEW_SUFFIX = "_lowres"
|
||||
|
||||
CLI_EXPORT_FILENAMES_EDITED_SUFFIX = [
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_bearbeiten.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding.jpg",
|
||||
"wedding_bearbeiten.jpeg",
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.dng",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092.heic",
|
||||
"IMG_3092_bearbeiten.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Tulips_bearbeiten.jpeg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest (2).jpg",
|
||||
"Frítest (3).jpg",
|
||||
"Frítest_bearbeiten.jpeg",
|
||||
"Frítest_bearbeiten (1).jpeg",
|
||||
"Frítest_bearbeiten.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_bearbeiten.jpeg",
|
||||
"IMG_3092.heic",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_bearbeiten.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_bearbeiten.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_bearbeiten.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_EDITED_SUFFIX_TEMPLATE = [
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.dng",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092.heic",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Tulips_edited.jpeg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest (2).jpg",
|
||||
"Frítest (3).jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092.heic",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_ORIGINAL_SUFFIX = [
|
||||
"Pumkins1_original.jpg",
|
||||
"Pumkins2_original.jpg",
|
||||
"Pumpkins3_original.jpg",
|
||||
"St James Park_original.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips_original.jpg",
|
||||
"wedding_original.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"[2020-08-29] AAF035_original (1).jpg",
|
||||
"[2020-08-29] AAF035_original (2).jpg",
|
||||
"[2020-08-29] AAF035_original (3).jpg",
|
||||
"[2020-08-29] AAF035_original.jpg",
|
||||
"DSC03584_original.dng",
|
||||
"IMG_1693_original.tif",
|
||||
"IMG_1994_original.JPG",
|
||||
"IMG_1994_original.cr2",
|
||||
"IMG_1997_original.JPG",
|
||||
"IMG_1997_original.cr2",
|
||||
"IMG_3092_original.heic",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_4547_original.jpg",
|
||||
"Jellyfish_original.MOV",
|
||||
"Jellyfish1_original.mp4",
|
||||
"Tulips_edited.jpeg",
|
||||
"screenshot-really-a-png_original.jpeg",
|
||||
"winebottle_original.jpeg",
|
||||
"winebottle_original (1).jpeg",
|
||||
"Frítest_original.jpg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_original (1).jpg",
|
||||
"Frítest_original (2).jpg",
|
||||
"Frítest_original (3).jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_original.jpg",
|
||||
"IMG_1693_original.tif",
|
||||
"IMG_1994_original.cr2",
|
||||
"IMG_1994_original.JPG",
|
||||
"IMG_1997_original.cr2",
|
||||
"IMG_1997_original.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092_original.heic",
|
||||
"IMG_4547_original.jpg",
|
||||
"Jellyfish_original.MOV",
|
||||
"Jellyfish1_original.mp4",
|
||||
"Pumkins1_original.jpg",
|
||||
"Pumkins2_original.jpg",
|
||||
"Pumpkins3_original.jpg",
|
||||
"screenshot-really-a-png_original.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park_original.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips_original.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding_original.jpg",
|
||||
"winebottle_original (1).jpeg",
|
||||
"winebottle_original.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_ORIGINAL_SUFFIX_TEMPLATE = [
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park_original.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips_original.jpg",
|
||||
"wedding_original.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"Tulips_edited.jpeg",
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.dng",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_original (1).jpg",
|
||||
"Frítest_original.jpg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.tif",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092_original.heic",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092_original.heic",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park_original.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips_original.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding_original.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest_original.jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_original (1).jpg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_CURRENT = [
|
||||
"1793FAAB-DE75-4E25-886C-2BD66C780D6A_edited.jpeg", # Frítest.jpg
|
||||
"1793FAAB-DE75-4E25-886C-2BD66C780D6A.jpeg", # Frítest.jpg
|
||||
"1EB2B765-0765-43BA-A90C-0D0580E6172C.jpeg",
|
||||
"2DFD33F1-A5D8-486F-A3A9-98C07995535A.jpeg",
|
||||
"35329C57-B963-48D6-BB75-6AFF9370CBBC.mov",
|
||||
"3DD2C897-F19E-4CA6-8C22-B027D5A71907.jpeg",
|
||||
"4D521201-92AC-43E5-8F7C-59BC41C37A96.cr2",
|
||||
"4D521201-92AC-43E5-8F7C-59BC41C37A96.jpeg",
|
||||
"52083079-73D5-4921-AC1B-FE76F279133F.jpeg",
|
||||
"54E76FCB-D353-4557-9997-0A457BCB4D48.jpeg",
|
||||
"6191423D-8DB8-4D4C-92BE-9BBBA308AAC4_edited.jpeg",
|
||||
"6191423D-8DB8-4D4C-92BE-9BBBA308AAC4.jpeg",
|
||||
"7783E8E6-9CAC-40F3-BE22-81FB7051C266_edited.jpeg",
|
||||
"7783E8E6-9CAC-40F3-BE22-81FB7051C266.heic",
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66.jpeg",
|
||||
"7FD37B5F-6FAA-4DB1-8A29-BF9C37E38091.jpeg",
|
||||
"8846E3E6-8AC8-4857-8448-E3D025784410.tiff",
|
||||
"A8266C97-9BAF-4AF4-99F3-0013832869B8.jpeg", # Frítest.jpg
|
||||
"A92D9C26-3A50-4197-9388-CB5F7DB9FA91.cr2",
|
||||
"A92D9C26-3A50-4197-9388-CB5F7DB9FA91.jpeg",
|
||||
"D05A5FE3-15FB-49A1-A15D-AB3DA6F8B068.dng",
|
||||
"D79B8D77-BFFC-460B-9312-034F2877D35B.jpeg",
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30.jpeg",
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30_edited.jpeg",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51.jpeg",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51_edited.jpeg",
|
||||
"F12384F6-CD17-4151-ACBA-AE0E3688539E.jpeg",
|
||||
"35329C57-B963-48D6-BB75-6AFF9370CBBC.mov",
|
||||
"6191423D-8DB8-4D4C-92BE-9BBBA308AAC4_edited.jpeg",
|
||||
"7783E8E6-9CAC-40F3-BE22-81FB7051C266.heic",
|
||||
"7783E8E6-9CAC-40F3-BE22-81FB7051C266_edited.jpeg",
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66.jpeg",
|
||||
"8846E3E6-8AC8-4857-8448-E3D025784410.tiff",
|
||||
"D1359D09-1373-4F3B-B0E3-1A4DE573E4A3.mp4",
|
||||
"E2078879-A29C-4D6F-BACB-E3BBE6C3EB91.jpeg",
|
||||
"52083079-73D5-4921-AC1B-FE76F279133F.jpeg",
|
||||
"B13F4485-94E0-41CD-AF71-913095D62E31.jpeg", # Frítest.jpg
|
||||
"1793FAAB-DE75-4E25-886C-2BD66C780D6A.jpeg", # Frítest.jpg
|
||||
"1793FAAB-DE75-4E25-886C-2BD66C780D6A_edited.jpeg", # Frítest.jpg
|
||||
"A8266C97-9BAF-4AF4-99F3-0013832869B8.jpeg", # Frítest.jpg
|
||||
"D1D4040D-D141-44E8-93EA-E403D9F63E07.jpeg", # Frítest.jpg
|
||||
"D05A5FE3-15FB-49A1-A15D-AB3DA6F8B068.dng",
|
||||
"D1359D09-1373-4F3B-B0E3-1A4DE573E4A3.mp4",
|
||||
"D1D4040D-D141-44E8-93EA-E403D9F63E07_edited.jpeg", # Frítest.jpg
|
||||
"D1D4040D-D141-44E8-93EA-E403D9F63E07.jpeg", # Frítest.jpg
|
||||
"D79B8D77-BFFC-460B-9312-034F2877D35B.jpeg",
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30_edited.jpeg",
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30.jpeg",
|
||||
"E2078879-A29C-4D6F-BACB-E3BBE6C3EB91.jpeg",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51_edited.jpeg",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51.jpeg",
|
||||
"F12384F6-CD17-4151-ACBA-AE0E3688539E.jpeg",
|
||||
"F207D5DE-EFAD-4217-8424-0764AAC971D0.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_CONVERT_TO_JPEG = [
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.jpeg",
|
||||
"IMG_1693.jpeg",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_3092.jpeg",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest (2).jpg",
|
||||
"Frítest (3).jpg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.jpeg",
|
||||
"IMG_1994.cr2",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.cr2",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_FILENAMES_CONVERT_TO_JPEG_SKIP_RAW = [
|
||||
"[2020-08-29] AAF035 (1).jpg",
|
||||
"[2020-08-29] AAF035 (2).jpg",
|
||||
"[2020-08-29] AAF035 (3).jpg",
|
||||
"[2020-08-29] AAF035.jpg",
|
||||
"DSC03584.jpeg",
|
||||
"IMG_1693.jpeg",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092.jpeg",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"St James Park.jpg",
|
||||
"St James Park_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"winebottle.jpeg",
|
||||
"winebottle (1).jpeg",
|
||||
"Frítest.jpg",
|
||||
"Frítest (1).jpg",
|
||||
"Frítest (2).jpg",
|
||||
"Frítest (3).jpg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest_edited (1).jpeg",
|
||||
"Frítest_edited.jpeg",
|
||||
"Frítest.jpg",
|
||||
"IMG_1693.jpeg",
|
||||
"IMG_1994.JPG",
|
||||
"IMG_1997.JPG",
|
||||
"IMG_3092_edited.jpeg",
|
||||
"IMG_3092.jpeg",
|
||||
"IMG_4547.jpg",
|
||||
"Jellyfish.MOV",
|
||||
"Jellyfish1.mp4",
|
||||
"Pumkins1.jpg",
|
||||
"Pumkins2.jpg",
|
||||
"Pumpkins3.jpg",
|
||||
"screenshot-really-a-png.jpeg",
|
||||
"St James Park_edited.jpeg",
|
||||
"St James Park.jpg",
|
||||
"Tulips_edited.jpeg",
|
||||
"Tulips.jpg",
|
||||
"wedding_edited.jpeg",
|
||||
"wedding.jpg",
|
||||
"winebottle (1).jpeg",
|
||||
"winebottle.jpeg",
|
||||
]
|
||||
|
||||
CLI_EXPORT_CONVERT_TO_JPEG_LARGE_FILE = "DSC03584.jpeg"
|
||||
@@ -546,7 +580,7 @@ PHOTOS_NOT_IN_TRASH_LEN_14_6 = 12
|
||||
PHOTOS_IN_TRASH_LEN_14_6 = 1
|
||||
PHOTOS_MISSING_14_6 = 1
|
||||
|
||||
PHOTOS_NOT_IN_TRASH_LEN_15_7 = 23
|
||||
PHOTOS_NOT_IN_TRASH_LEN_15_7 = 27
|
||||
PHOTOS_IN_TRASH_LEN_15_7 = 2
|
||||
PHOTOS_MISSING_15_7 = 2
|
||||
PHOTOS_EDITED_15_7 = 6
|
||||
@@ -732,6 +766,7 @@ ALBUMS_JSON = {
|
||||
"Sorted Newest First": 3,
|
||||
"Sorted Oldest First": 3,
|
||||
"Sorted Title": 3,
|
||||
"Água": 3,
|
||||
},
|
||||
"shared albums": {},
|
||||
}
|
||||
@@ -746,6 +781,7 @@ ALBUMS_STR = """albums:
|
||||
2018-10 - Sponsion, Museum, Frühstück, Römermuseum: 1
|
||||
2019-10/11 Paris Clermont: 1
|
||||
EmptyAlbum: 0
|
||||
Água: 3
|
||||
shared albums: {}
|
||||
"""
|
||||
|
||||
@@ -820,37 +856,45 @@ UUID_IS_REFERENCE = [
|
||||
]
|
||||
|
||||
UUID_IN_ALBUM = [
|
||||
"F12384F6-CD17-4151-ACBA-AE0E3688539E",
|
||||
"8E1D7BC9-9321-44F9-8CFB-4083F6B9232A",
|
||||
"1EB2B765-0765-43BA-A90C-0D0580E6172C",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51",
|
||||
"A92D9C26-3A50-4197-9388-CB5F7DB9FA91",
|
||||
"D79B8D77-BFFC-460B-9312-034F2877D35B",
|
||||
"4D521201-92AC-43E5-8F7C-59BC41C37A96",
|
||||
"D05A5FE3-15FB-49A1-A15D-AB3DA6F8B068",
|
||||
"2DFD33F1-A5D8-486F-A3A9-98C07995535A",
|
||||
"3DD2C897-F19E-4CA6-8C22-B027D5A71907",
|
||||
"4D521201-92AC-43E5-8F7C-59BC41C37A96",
|
||||
"54E76FCB-D353-4557-9997-0A457BCB4D48",
|
||||
"7783E8E6-9CAC-40F3-BE22-81FB7051C266",
|
||||
"7FD37B5F-6FAA-4DB1-8A29-BF9C37E38091",
|
||||
"8E1D7BC9-9321-44F9-8CFB-4083F6B9232A",
|
||||
"A92D9C26-3A50-4197-9388-CB5F7DB9FA91",
|
||||
"D05A5FE3-15FB-49A1-A15D-AB3DA6F8B068",
|
||||
"D79B8D77-BFFC-460B-9312-034F2877D35B",
|
||||
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51",
|
||||
"F12384F6-CD17-4151-ACBA-AE0E3688539E",
|
||||
]
|
||||
|
||||
UUID_NOT_IN_ALBUM = [
|
||||
"A1DD1F98-2ECD-431F-9AC9-5AFEFE2D3A5C",
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30",
|
||||
"D1359D09-1373-4F3B-B0E3-1A4DE573E4A3",
|
||||
"E2078879-A29C-4D6F-BACB-E3BBE6C3EB91",
|
||||
"6191423D-8DB8-4D4C-92BE-9BBBA308AAC4",
|
||||
"35329C57-B963-48D6-BB75-6AFF9370CBBC",
|
||||
"8846E3E6-8AC8-4857-8448-E3D025784410",
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66",
|
||||
"52083079-73D5-4921-AC1B-FE76F279133F",
|
||||
"B13F4485-94E0-41CD-AF71-913095D62E31", # Frítest.jpg
|
||||
"1793FAAB-DE75-4E25-886C-2BD66C780D6A", # Frítest.jpg
|
||||
"35329C57-B963-48D6-BB75-6AFF9370CBBC",
|
||||
"52083079-73D5-4921-AC1B-FE76F279133F",
|
||||
"6191423D-8DB8-4D4C-92BE-9BBBA308AAC4",
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66",
|
||||
"8846E3E6-8AC8-4857-8448-E3D025784410",
|
||||
"A1DD1F98-2ECD-431F-9AC9-5AFEFE2D3A5C",
|
||||
"A8266C97-9BAF-4AF4-99F3-0013832869B8", # Frítest.jpg
|
||||
"B13F4485-94E0-41CD-AF71-913095D62E31", # Frítest.jpg
|
||||
"D1359D09-1373-4F3B-B0E3-1A4DE573E4A3",
|
||||
"D1D4040D-D141-44E8-93EA-E403D9F63E07", # Frítest.jpg
|
||||
"DC99FBDD-7A52-4100-A5BB-344131646C30",
|
||||
"E2078879-A29C-4D6F-BACB-E3BBE6C3EB91",
|
||||
"F207D5DE-EFAD-4217-8424-0764AAC971D0",
|
||||
]
|
||||
|
||||
UUID_DUPLICATES = [
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66",
|
||||
"2DFD33F1-A5D8-486F-A3A9-98C07995535A",
|
||||
"52083079-73D5-4921-AC1B-FE76F279133F",
|
||||
"54E76FCB-D353-4557-9997-0A457BCB4D48",
|
||||
"7F74DD34-5920-4DA3-B284-479887A34F66",
|
||||
"A92D9C26-3A50-4197-9388-CB5F7DB9FA91",
|
||||
"F207D5DE-EFAD-4217-8424-0764AAC971D0",
|
||||
]
|
||||
|
||||
UUID_LOCATION = "D79B8D77-BFFC-460B-9312-034F2877D35B" # Pumkins2.jpg
|
||||
@@ -1402,6 +1446,7 @@ def test_query_exif_case_insensitive(exiftag, exifvalue, uuid_expected):
|
||||
|
||||
|
||||
def test_export():
|
||||
"""Test basic export"""
|
||||
import glob
|
||||
import os
|
||||
import os.path
|
||||
@@ -1418,6 +1463,24 @@ def test_export():
|
||||
files = glob.glob("*")
|
||||
assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)
|
||||
|
||||
def test_export_multiprocess():
|
||||
"""Test basic export with --multiprocess"""
|
||||
import glob
|
||||
import os
|
||||
import os.path
|
||||
|
||||
import osxphotos
|
||||
from osxphotos.cli import export
|
||||
|
||||
runner = CliRunner()
|
||||
cwd = os.getcwd()
|
||||
# pylint: disable=not-context-manager
|
||||
with runner.isolated_filesystem():
|
||||
result = runner.invoke(export, [os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V", "--multiprocess", "2"])
|
||||
assert result.exit_code == 0
|
||||
files = glob.glob("*")
|
||||
assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)
|
||||
|
||||
|
||||
def test_export_uuid_from_file():
|
||||
"""Test export with --uuid-from-file"""
|
||||
@@ -2517,7 +2580,8 @@ def test_export_duplicate():
|
||||
# pylint: disable=not-context-manager
|
||||
with runner.isolated_filesystem():
|
||||
result = runner.invoke(
|
||||
export, [os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V", "--duplicate"]
|
||||
export,
|
||||
[os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V", "--duplicate", "--skip-raw"],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
files = glob.glob("*")
|
||||
@@ -4046,8 +4110,7 @@ def test_export_filename_template_long_description():
|
||||
],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
for fname in CLI_EXPORTED_FILENAME_TEMPLATE_LONG_DESCRIPTION:
|
||||
assert pathlib.Path(fname).is_file()
|
||||
assert "exported: 1" in result.output
|
||||
|
||||
|
||||
def test_export_filename_template_3():
|
||||
@@ -5084,7 +5147,7 @@ def test_export_dry_run():
|
||||
in result.output
|
||||
)
|
||||
for filepath in CLI_EXPORT_FILENAMES_DRY_RUN:
|
||||
assert re.search(r"Exported.*" + f"{filepath}", result.output)
|
||||
assert re.search(r"Exported.*" + f"{re.escape(normalize_fs_path(filepath))}", result.output)
|
||||
assert not os.path.isfile(normalize_fs_path(filepath))
|
||||
|
||||
|
||||
@@ -6029,7 +6092,7 @@ def test_export_cleanup_empty_album():


def test_export_cleanup_accented_album_name():
"""test export with --cleanup flag and photos in album with accented unicode characters (#561)"""
"""test export with --cleanup flag and photos in album with accented unicode characters (#561, #618)"""
import pathlib

from osxphotos.cli import export
@@ -6052,6 +6115,89 @@ def test_export_cleanup_accented_album_name():
)
assert "Deleted: 0 files, 0 directories" in result.output

# do it again
result = runner.invoke(
export,
[
os.path.join(cwd, CLI_PHOTOS_DB),
tempdir,
"-V",
"--update",
"--cleanup",
"--directory",
"{folder_album}",
"--update",
],
)
assert "exported: 0, updated: 0" in result.output
assert "Deleted: 0 files, 0 directories" in result.output


@pytest.mark.skipif(exiftool is None, reason="exiftool not installed")
def test_export_cleanup_exiftool_accented_album_name_same_filenames():
"""test export with --cleanup flag and photos in album with accented unicode characters (#561, #618)"""
import pathlib

from osxphotos.cli import export

runner = CliRunner()
cwd = os.getcwd()
# pylint: disable=not-context-manager
with tempfile.TemporaryDirectory() as report_dir:
# keep report file out of export dir for --cleanup
report_file = os.path.join(report_dir, "test.csv")
with tempfile.TemporaryDirectory() as tempdir:
result = runner.invoke(
export,
[
os.path.join(cwd, CLI_PHOTOS_DB),
tempdir,
"-V",
"--cleanup",
"--directory",
"{album[/,.|:,.]}",
"--exiftool",
"--exiftool-merge-keywords",
"--exiftool-merge-persons",
"--keyword-template",
"{keyword}",
"--report",
report_file,
"--skip-original-if-edited",
"--update",
"--touch-file",
"--not-hidden",
],
)
assert "Deleted: 0 files, 0 directories" in result.output

# do it again
result = runner.invoke(
export,
[
os.path.join(cwd, CLI_PHOTOS_DB),
tempdir,
"-V",
"--cleanup",
"--directory",
"{album[/,.|:,.]}",
"--exiftool",
"--exiftool-merge-keywords",
"--exiftool-merge-persons",
"--keyword-template",
"{keyword}",
"--report",
report_file,
"--skip-original-if-edited",
"--update",
"--touch-file",
"--not-hidden",
],
)
assert "exported: 0, updated: 0" in result.output
assert "updated EXIF data: 0" in result.output
assert "Deleted: 0 files, 0 directories" in result.output


def test_save_load_config():
"""test --save-config, --load-config"""
@@ -7008,6 +7154,30 @@ def test_query_name():
assert json_got[0]["original_filename"] == "DSC03584.dng"


def test_query_name_unicode():
"""test query --name with a unicode name"""
import json
import os
import os.path

import osxphotos
from osxphotos.cli import query

runner = CliRunner()
cwd = os.getcwd()
result = runner.invoke(
query,
["--json", "--db", os.path.join(cwd, PHOTOS_DB_15_7), "--name", "Frítest"],
)
assert result.exit_code == 0
json_got = json.loads(result.output)

assert len(json_got) == 4
assert normalize_unicode(json_got[0]["original_filename"]).startswith(
normalize_unicode("Frítest.jpg")
)

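`test_query_name_unicode` compares filenames through `normalize_unicode()` because "í" can be stored precomposed (NFC) or decomposed (NFD), and the Photos library and the file system do not always agree on the form. A small standard-library example of why a plain string comparison is not sufficient:

```python
import unicodedata

nfc = unicodedata.normalize("NFC", "Frítest.jpg")  # "í" as one code point
nfd = unicodedata.normalize("NFD", "Frítest.jpg")  # "i" plus combining acute accent

assert nfc != nfd                        # the two spellings compare unequal
assert (len(nfc), len(nfd)) == (11, 12)  # same visible text, different lengths
assert unicodedata.normalize("NFC", nfd) == nfc  # normalizing first makes them equal
```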

def test_query_name_i():
"""test query --name -i"""
import json
@@ -7037,6 +7207,46 @@ def test_query_name_i():
assert json_got[0]["original_filename"] == "DSC03584.dng"


def test_query_name_original_filename():
"""test query --name only searches original filename on Photos 5+"""
import json
import os
import os.path

from osxphotos.cli import query

runner = CliRunner()
cwd = os.getcwd()
result = runner.invoke(
query,
["--json", "--db", os.path.join(cwd, PHOTOS_DB_15_7), "--name", "AA"],
)
assert result.exit_code == 0
json_got = json.loads(result.output)

assert len(json_got) == 4


def test_query_name_original_filename_i():
"""test query --name only searches original filename on Photos 5+ with -i"""
import json
import os
import os.path

from osxphotos.cli import query

runner = CliRunner()
cwd = os.getcwd()
result = runner.invoke(
query,
["--json", "--db", os.path.join(cwd, PHOTOS_DB_15_7), "--name", "aa", "-i"],
)
assert result.exit_code == 0
json_got = json.loads(result.output)

assert len(json_got) == 4


def test_export_name():
"""test export --name"""
import glob

@@ -140,7 +140,6 @@ def test_export_edited_exiftool(photosdb):
got_dest = photos[0].export(
dest, use_photos_export=True, edited=True, exiftool=True
)
logging.warning(got_dest)
got_dest = got_dest[0]

assert os.path.isfile(got_dest)

@@ -1,6 +1,10 @@
""" Test path_utils.py """


def test_sanitize_filename():
"""test sanitize_filename"""

# subtract 6 chars from max length of 255 to account for lock file extension
from osxphotos.path_utils import sanitize_filename
from osxphotos._constants import MAX_FILENAME_LEN

@@ -30,25 +34,25 @@ def test_sanitize_filename():
filename = "foo" + "x" * 512
new_filename = sanitize_filename(filename)
assert len(new_filename) == MAX_FILENAME_LEN
assert new_filename == "foo" + "x" * 252
assert new_filename == "foo" + "x" * (252 - 6)

# filename too long with extension
filename = "x" * 512 + ".jpeg"
new_filename = sanitize_filename(filename)
assert len(new_filename) == MAX_FILENAME_LEN
assert new_filename == "x" * 250 + ".jpeg"
assert new_filename == "x" * (250 - 6) + ".jpeg"

# more than one extension
filename = "foo.bar" + "x" * 255 + ".foo.bar.jpeg"
new_filename = sanitize_filename(filename)
assert len(new_filename) == MAX_FILENAME_LEN
assert new_filename == "foo.bar" + "x" * 243 + ".jpeg"
assert new_filename == "foo.bar" + "x" * (243 - 6) + ".jpeg"

# shorter than drop count
filename = "foo." + "x" * 256
new_filename = sanitize_filename(filename)
assert len(new_filename) == MAX_FILENAME_LEN
assert new_filename == "foo." + "x" * 251
assert new_filename == "foo." + "x" * (251 - 6)


def test_sanitize_dirname():
@@ -83,6 +87,7 @@ def test_sanitize_dirname():
assert len(new_dirname) == MAX_DIRNAME_LEN
assert new_dirname == "foo" + "x" * 252


def test_sanitize_pathpart():
from osxphotos.path_utils import sanitize_pathpart
from osxphotos._constants import MAX_DIRNAME_LEN
@@ -114,4 +119,3 @@ def test_sanitize_pathpart():
new_dirname = sanitize_pathpart(dirname)
assert len(new_dirname) == MAX_DIRNAME_LEN
assert new_dirname == "foo" + "x" * 252


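The updated assertions subtract 6 characters because, per the new comment in the test, `sanitize_filename()` now reserves room for a lock-file extension on top of the usual 255-character limit. The sketch below is a rough, hypothetical version of that kind of truncation (not the osxphotos implementation; `RESERVED` and `truncate_filename` are invented names):

```python
import pathlib

MAX_FILENAME_LEN = 255  # typical file system limit for a single path component
RESERVED = 6            # room reserved for a lock-file suffix, e.g. ".lock"


def truncate_filename(filename: str, max_len: int = MAX_FILENAME_LEN, reserved: int = RESERVED) -> str:
    """Truncate the stem so that name + reserved suffix fits in max_len, keeping the extension."""
    limit = max_len - reserved
    if len(filename) <= limit:
        return filename
    suffix = pathlib.Path(filename).suffix  # e.g. ".jpeg"
    stem = filename[: len(filename) - len(suffix)]
    return stem[: limit - len(suffix)] + suffix


# Mirrors the shape of the updated test: "x" * 512 + ".jpeg" truncates to
# 244 x's + ".jpeg", i.e. 249 characters (255 - 6).
assert truncate_filename("x" * 512 + ".jpeg") == "x" * (250 - 6) + ".jpeg"
```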
57
utils/exiftool_supported_types.py
Normal file
@@ -0,0 +1,57 @@
"""Read the "Supported File Types" table from exiftool.org and build a json file from the table"""

import json
import sys

import requests
from bs4 import BeautifulSoup


if __name__ == "__main__":
url = "https://www.exiftool.org/"
json_file = "exiftool_filetypes.json"

html_content = requests.get(url).text

soup = BeautifulSoup(html_content, "html.parser")

# uncomment to see all table classes
# print("Classes of each table:")
# for table in soup.find_all("table"):
# print(table.get("class"))

# strip footnotes in <span> tags
for span_tag in soup.findAll("span"):
span_tag.replace_with("")

# find the table for Supported File Types
table = soup.find("table", class_="sticky tight sm bm")

# get table headers
table_headers = [tx.text.lower() for tx in table.find_all("th")]

# get table data
table_data = []
for tr in table.find_all("tr"):
if row := [td.text for td in tr.find_all("td")]:
table_data.append(row)

# make a dictionary of the table data
supported_filetypes = {}
for row in table_data:
row_dict = dict(zip(table_headers, row))
for key, value in row_dict.items():
if value == "-":
row_dict[key] = None
row_dict["file type"] = row_dict["file type"].split(",")
row_dict["file type"] = [ft.strip() for ft in row_dict["file type"]]
row_dict["read"] = "R" in row_dict["support"]
row_dict["write"] = "W" in row_dict["support"]
row_dict["create"] = "C" in row_dict["support"]
filetypes = [ft.lower() for ft in row_dict["file type"]]
for filetype in filetypes:
supported_filetypes[filetype] = {"extension": filetype, **row_dict}

with open(json_file, "w") as jsonfile:
print(f"Writing {json_file}...")
json.dump(supported_filetypes, jsonfile, indent=4)
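The script above writes `exiftool_filetypes.json`, a dictionary keyed by lowercase extension whose values carry the table row plus boolean `read`/`write`/`create` flags; this is the data that lets osxphotos skip file types exiftool cannot handle (#615). A brief, hypothetical example of consuming that file (the helper name and lookup logic are illustrative, not the library's API):

```python
import json
import pathlib


def exiftool_can_write(path: str, filetypes_json: str = "exiftool_filetypes.json") -> bool:
    """Return True if the exiftool table lists the file's extension as writable."""
    with open(filetypes_json) as fp:
        supported = json.load(fp)
    ext = pathlib.Path(path).suffix.lstrip(".").lower()
    entry = supported.get(ext)
    return bool(entry and entry["write"])


# JPEG should be writable; an unknown or unsupported extension returns False.
print(exiftool_can_write("IMG_0001.jpeg"))
print(exiftool_can_write("notes.xyz"))
```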