Compare commits

6 commits: v0.45.11...multiproce

| SHA1 |
|---|
| 5bdd52df25 |
| 3cde0b79c9 |
| e2bd262f75 |
| db26532bab |
| 7a73b9168d |
| 79dcfb38a8 |
CHANGELOG.md (14 lines changed)
@@ -4,20 +4,6 @@ All notable changes to this project will be documented in this file. Dates are d

Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).

#### [v0.45.10](https://github.com/RhetTbull/osxphotos/compare/v0.45.9...v0.45.10)

> 12 February 2022

- Added --force-update, #621 [`30abddd`](https://github.com/RhetTbull/osxphotos/commit/30abdddaf3765f1d604984d4781b78b7806871e1)

#### [v0.45.9](https://github.com/RhetTbull/osxphotos/compare/v0.45.8...v0.45.9)

> 12 February 2022

- Added --force-update, #621 [`bfa888a`](https://github.com/RhetTbull/osxphotos/commit/bfa888adc5658a2845dcaa9b7ea360926ed4f000)
- Refactored fix for #627 [`5fb686a`](https://github.com/RhetTbull/osxphotos/commit/5fb686ac0c231932c2695fc550a0824307bd3c5f)
- Fix for #630 [`ac4083b`](https://github.com/RhetTbull/osxphotos/commit/ac4083bfbbabc8550718f0f7f8aadc635c05eb25)

#### [v0.45.8](https://github.com/RhetTbull/osxphotos/compare/v0.45.6...v0.45.8)

> 5 February 2022
README.md (18 lines changed)
@@ -783,15 +783,8 @@ Options:
                        folder.
  --deleted-only        Include only photos from the 'Recently
                        Deleted' folder.
  --update              Only export new or updated files. See also
                        --force-update and notes below on export and
                        --update.
  --force-update        Only export new or updated files. Unlike
                        --update, --force-update will re-export photos
                        if their metadata has changed even if this
                        would not otherwise trigger an export. See
                        also --update and notes below on export and
                        --update.
  --update              Only export new or updated files. See notes
                        below on export and --update.
  --ignore-signature    When used with '--update', ignores file
                        signature when updating files. This is useful
                        if you have processed or edited exported
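The re-export decision behind --force-update boils down to a metadata-digest comparison, visible in the photoexporter.py hunks further down: the exporter hashes the photo's JSON metadata and compares it to the digest stored in the export database. A minimal sketch of that check, assuming a SHA-256 stand-in for osxphotos' internal `hexdigest` helper (whose exact algorithm is not shown in this diff):

```python
import hashlib
from typing import Optional

def hexdigest(s: str) -> str:
    # stand-in for osxphotos' internal hexdigest helper
    return hashlib.sha256(s.encode("utf-8")).hexdigest()

def needs_force_update(photo_json: str, db_digest: Optional[str]) -> bool:
    # re-export if no digest was ever recorded or the metadata hash changed
    return db_digest is None or hexdigest(photo_json) != db_digest
```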
@@ -1187,6 +1180,9 @@ Options:
  --save-config <config file path>
                        Save options to file for use with --load-
                        config. File format is TOML.
  -M, --multiprocess NUMBER_OF_PROCESSES
                        Run export in parallel using
                        NUMBER_OF_PROCESSES processes. [x>=1]
  --help                Show this message and exit.
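The cli.py diff implementing -M/--multiprocess is suppressed below, so the following is only a plausible sketch of what the parallel export could look like: split the photo list across worker processes, with each worker opening its own database connection and relying on the .filename.lock files (added elsewhere in this diff) to avoid filename collisions. The function names here are illustrative, not from the branch:

```python
from concurrent.futures import ProcessPoolExecutor
from typing import List

def export_one_photo(uuid: str) -> list:
    """Worker: export a single photo. Each process opens its own sqlite
    connection lazily (see get_connection in export_db.py below); the
    .filename.lock files keep workers from claiming the same destination."""
    ...  # would call into PhotoExporter for this uuid
    return []

def export_parallel(uuids: List[str], processes: int) -> list:
    # NUMBER_OF_PROCESSES maps to max_workers; [x>=1] per the help text
    with ProcessPoolExecutor(max_workers=processes) as pool:
        return list(pool.map(export_one_photo, uuids))
```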
** Export **

@@ -1732,7 +1728,7 @@ Substitution Description
{lf} A line feed: '\n', alias for {newline}
{cr} A carriage return: '\r'
{crlf} a carriage return + line feed: '\r\n'
{osxphotos_version} The osxphotos version, e.g. '0.45.11'
{osxphotos_version} The osxphotos version, e.g. '0.45.8'
{osxphotos_cmd_line} The full command line used to run osxphotos

The following substitutions may result in multiple values. Thus if specified for

@@ -3636,7 +3632,7 @@ The following template field substitutions are available for use the templating s
|{lf}|A line feed: '\n', alias for {newline}|
|{cr}|A carriage return: '\r'|
|{crlf}|a carriage return + line feed: '\r\n'|
|{osxphotos_version}|The osxphotos version, e.g. '0.45.11'|
|{osxphotos_version}|The osxphotos version, e.g. '0.45.8'|
|{osxphotos_cmd_line}|The full command line used to run osxphotos|
|{album}|Album(s) photo is contained in|
|{folder_album}|Folder path + album photo is contained in. e.g. 'Folder/Subfolder/Album' or just 'Album' if no enclosing folder|
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: 4096293689c0c969f1ec21d5ea133ab2
config: bf43bf49b725c31ce72a8823e4f8012b
tags: 645f666f9bcd5a90fca523b33c5a78b7
docs/_static/documentation_options.js (vendored, 2 lines changed)
@@ -1,6 +1,6 @@
var DOCUMENTATION_OPTIONS = {
    URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
    VERSION: '0.45.11',
    VERSION: '0.45.8',
    LANGUAGE: 'None',
    COLLAPSE_INDEX: false,
    BUILDER: 'html',
@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos command line interface (CLI) — osxphotos 0.45.11 documentation</title>
<title>osxphotos command line interface (CLI) — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -5,7 +5,7 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Index — osxphotos 0.45.11 documentation</title>
<title>Index — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>Welcome to osxphotos’s documentation! — osxphotos 0.45.11 documentation</title>
<title>Welcome to osxphotos’s documentation! — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos — osxphotos 0.45.11 documentation</title>
<title>osxphotos — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -6,7 +6,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />

<title>osxphotos package — osxphotos 0.45.11 documentation</title>
<title>osxphotos package — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>

@@ -5,7 +5,7 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Search — osxphotos 0.45.11 documentation</title>
<title>Search — osxphotos 0.45.8 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
@@ -214,7 +214,8 @@ SEARCH_CATEGORY_PHOTO_NAME = 2056

# Max filename length on MacOS
MAX_FILENAME_LEN = 255
# subtract 6 chars for the lock file extension in form: ".filename.lock"
MAX_FILENAME_LEN = 255 - 6

# Max directory name length on MacOS
MAX_DIRNAME_LEN = 255
@@ -1,3 +1,3 @@
""" version info """

__version__ = "0.45.11"
__version__ = "0.45.8"
osxphotos/cli.py (1072 lines changed): file diff suppressed because it is too large.
@@ -18,9 +18,8 @@ from .utils import normalize_fs_path

__all__ = ["ExportDB_ABC", "ExportDBNoOp", "ExportDB", "ExportDBInMemory"]

OSXPHOTOS_EXPORTDB_VERSION = "5.0"
OSXPHOTOS_EXPORTDB_VERSION = "4.3"
OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_FILEPATH = "4.3"
OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_TABLES = "4.3"

OSXPHOTOS_ABOUT_STRING = f"Created by osxphotos version {__version__} (https://github.com/RhetTbull/osxphotos) on {datetime.datetime.now()}"

@@ -104,14 +103,6 @@ class ExportDB_ABC(ABC):
    def set_detected_text_for_uuid(self, uuid, json_text):
        pass

    @abstractmethod
    def set_metadata_for_file(self, filename, metadata):
        pass

    @abstractmethod
    def get_metadata_for_file(self, filename):
        pass

    @abstractmethod
    def set_data(
        self,
@@ -123,10 +114,12 @@ class ExportDB_ABC(ABC):
        edited_stat=None,
        info_json=None,
        exif_json=None,
        metadata=None,
    ):
        pass

    @abstractmethod
    def get_connection(self):
        pass

class ExportDBNoOp(ExportDB_ABC):
    """An ExportDB with NoOp methods"""
@@ -193,12 +186,6 @@ class ExportDBNoOp(ExportDB_ABC):
    def set_detected_text_for_uuid(self, uuid, json_text):
        pass

    def set_metadata_for_file(self, filename, metadata):
        pass

    def get_metadata_for_file(self, filename):
        pass

    def set_data(
        self,
        filename,
@@ -209,10 +196,11 @@ class ExportDBNoOp(ExportDB_ABC):
        edited_stat=None,
        info_json=None,
        exif_json=None,
        metadata=None,
    ):
        pass

    def get_connection(self):
        pass

class ExportDB(ExportDB_ABC):
    """Interface to sqlite3 database used to store state information for osxphotos export command"""
@@ -233,7 +221,7 @@ class ExportDB(ExportDB_ABC):
        returns None if filename not found in database
        """
        filepath_normalized = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -251,7 +239,7 @@ class ExportDB(ExportDB_ABC):
        """set UUID of filename to uuid in the database"""
        filename = str(pathlib.Path(filename).relative_to(self._path))
        filename_normalized = self._normalize_filepath(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -271,7 +259,7 @@ class ExportDB(ExportDB_ABC):
        if len(stats) != 3:
            raise ValueError(f"expected 3 elements for stat, got {len(stats)}")

        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -289,7 +277,7 @@ class ExportDB(ExportDB_ABC):
        returns: tuple of (mode, size, mtime)
        """
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -328,7 +316,7 @@ class ExportDB(ExportDB_ABC):
        if len(stats) != 3:
            raise ValueError(f"expected 3 elements for stat, got {len(stats)}")

        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -346,7 +334,7 @@ class ExportDB(ExportDB_ABC):
        returns: tuple of (mode, size, mtime)
        """
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -379,7 +367,7 @@ class ExportDB(ExportDB_ABC):

    def get_info_for_uuid(self, uuid):
        """returns the info JSON struct for a UUID"""
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute("SELECT json_info FROM info WHERE uuid = ?", (uuid,))
@@ -393,7 +381,7 @@ class ExportDB(ExportDB_ABC):

    def set_info_for_uuid(self, uuid, info):
        """sets the info JSON struct for a UUID"""
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -407,7 +395,7 @@ class ExportDB(ExportDB_ABC):
    def get_exifdata_for_file(self, filename):
        """returns the exifdata JSON struct for a file"""
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -425,7 +413,7 @@ class ExportDB(ExportDB_ABC):
    def set_exifdata_for_file(self, filename, exifdata):
        """sets the exifdata JSON struct for a file"""
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -439,7 +427,7 @@ class ExportDB(ExportDB_ABC):
    def get_sidecar_for_file(self, filename):
        """returns the sidecar data and signature for a file"""
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -467,7 +455,7 @@ class ExportDB(ExportDB_ABC):
    def set_sidecar_for_file(self, filename, sidecar_data, sidecar_sig):
        """sets the sidecar data and signature for a file"""
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -480,7 +468,7 @@ class ExportDB(ExportDB_ABC):

    def get_previous_uuids(self):
        """returns list of UUIDs of previously exported photos found in export database"""
        conn = self._conn
        conn = self.get_connection()
        previous_uuids = []
        try:
            c = conn.cursor()
@@ -493,7 +481,7 @@ class ExportDB(ExportDB_ABC):

    def get_detected_text_for_uuid(self, uuid):
        """Get the detected_text for a uuid"""
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -510,7 +498,7 @@ class ExportDB(ExportDB_ABC):

    def set_detected_text_for_uuid(self, uuid, text_json):
        """Set the detected text for uuid"""
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -524,39 +512,6 @@ class ExportDB(ExportDB_ABC):
        except Error as e:
            logging.warning(e)

    def set_metadata_for_file(self, filename, metadata):
        """set metadata of filename in the database"""
        filename = str(pathlib.Path(filename).relative_to(self._path))
        filename_normalized = self._normalize_filepath(filename)
        conn = self._conn
        try:
            c = conn.cursor()
            c.execute(
                "UPDATE files SET metadata = ? WHERE filepath_normalized = ?;",
                (metadata, filename_normalized),
            )
            conn.commit()
        except Error as e:
            logging.warning(e)

    def get_metadata_for_file(self, filename):
        """get metadata value for file"""
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        try:
            c = conn.cursor()
            c.execute(
                "SELECT metadata FROM files WHERE filepath_normalized = ?",
                (filename,),
            )
            results = c.fetchone()
            metadata = results[0] if results else None
        except Error as e:
            logging.warning(e)
            metadata = None

        return metadata

    def set_data(
        self,
        filename,
@@ -567,12 +522,11 @@ class ExportDB(ExportDB_ABC):
        edited_stat=None,
        info_json=None,
        exif_json=None,
        metadata=None,
    ):
        """sets all the data for file and uuid at once; if any value is None, does not set it"""
        filename = str(pathlib.Path(filename).relative_to(self._path))
        filename_normalized = self._normalize_filepath(filename)
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            # update files table (if needed);
@@ -621,15 +575,6 @@ class ExportDB(ExportDB_ABC):
                "INSERT OR REPLACE INTO exifdata(filepath_normalized, json_exifdata) VALUES (?, ?);",
                (filename_normalized, exif_json),
            )

            if metadata is not None:
                c.execute(
                    "UPDATE files "
                    + "SET metadata = ? "
                    + "WHERE filepath_normalized = ?;",
                    (metadata, filename_normalized),
                )

            conn.commit()
        except Error as e:
            logging.warning(e)
@@ -637,16 +582,23 @@ class ExportDB(ExportDB_ABC):
    def close(self):
        """close the database connection"""
        try:
            self._conn.close()
            if self._conn:
                self._conn.close()
            self._conn = None
        except Error as e:
            logging.warning(e)

    def get_connection(self):
        if self._conn is None:
            self._conn = self._open_export_db(self._dbfile)
        return self._conn
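The get_connection change above makes the sqlite connection lazy: it is opened on first use instead of at construction. For a multiprocess export this matters because sqlite3 connections cannot be shared across forked processes; each worker must open its own. A stripped-down illustration of the pattern (not osxphotos code):

```python
import sqlite3

class LazyConnectionDB:
    """Open the sqlite connection on first use so each worker process
    gets its own connection instead of one inherited across fork()."""

    def __init__(self, dbfile: str):
        self._dbfile = dbfile
        self._conn = None  # opened lazily, once per process

    def get_connection(self) -> sqlite3.Connection:
        if self._conn is None:
            self._conn = sqlite3.connect(self._dbfile)
        return self._conn

    def close(self) -> None:
        if self._conn:
            self._conn.close()
        self._conn = None
```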
    def _set_stat_for_file(self, table, filename, stats):
        filename = self._normalize_filepath_relative(filename)
        if len(stats) != 3:
            raise ValueError(f"expected 3 elements for stat, got {len(stats)}")

        conn = self._conn
        conn = self.get_connection()
        c = conn.cursor()
        c.execute(
            f"INSERT OR REPLACE INTO {table}(filepath_normalized, mode, size, mtime) VALUES (?, ?, ?, ?);",
@@ -656,7 +608,7 @@ class ExportDB(ExportDB_ABC):

    def _get_stat_for_file(self, table, filename):
        filename = self._normalize_filepath_relative(filename)
        conn = self._conn
        conn = self.get_connection()
        c = conn.cursor()
        c.execute(
            f"SELECT mode, size, mtime FROM {table} WHERE filepath_normalized = ?",
@@ -682,7 +634,7 @@ class ExportDB(ExportDB_ABC):
            conn = self._get_db_connection(dbfile)
            if not conn:
                raise Exception("Error getting connection to database {dbfile}")
            self._create_or_migrate_db_tables(conn)
            self._create_db_tables(conn)
            self.was_created = True
            self.was_upgraded = ()
        else:
@@ -690,7 +642,9 @@ class ExportDB(ExportDB_ABC):
            self.was_created = False
            version_info = self._get_database_version(conn)
            if version_info[1] < OSXPHOTOS_EXPORTDB_VERSION:
                self._create_or_migrate_db_tables(conn)
                self._create_db_tables(conn)
                if version_info[1] < OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_FILEPATH:
                    self._migrate_normalized_filepath(conn)
                self.was_upgraded = (version_info[1], OSXPHOTOS_EXPORTDB_VERSION)
            else:
                self.was_upgraded = ()
@@ -722,97 +676,104 @@ class ExportDB(ExportDB_ABC):
        ).fetchone()
        return (version_info[0], version_info[1])

    def _create_or_migrate_db_tables(self, conn):
        """create (if not already created) the necessary db tables for the export database and apply any needed migrations

        Args:
            conn: sqlite3 db connection
    def _create_db_tables(self, conn):
        """create (if not already created) the necessary db tables for the export database
        conn: sqlite3 db connection
        """
        try:
            version = self._get_database_version(conn)
        except Exception as e:
            version = (__version__, OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_TABLES)

        # Current for version 4.3, for anything greater, do a migration after creation
        sql_commands = [
            """ CREATE TABLE IF NOT EXISTS version (
                id INTEGER PRIMARY KEY,
                osxphotos TEXT,
                exportdb TEXT
                ); """,
            """ CREATE TABLE IF NOT EXISTS about (
                id INTEGER PRIMARY KEY,
                about TEXT
                );""",
            """ CREATE TABLE IF NOT EXISTS files (
                id INTEGER PRIMARY KEY,
                filepath TEXT NOT NULL,
                filepath_normalized TEXT NOT NULL,
                uuid TEXT,
                orig_mode INTEGER,
                orig_size INTEGER,
                orig_mtime REAL,
                exif_mode INTEGER,
                exif_size INTEGER,
                exif_mtime REAL
                ); """,
            """ CREATE TABLE IF NOT EXISTS runs (
                id INTEGER PRIMARY KEY,
                datetime TEXT,
                python_path TEXT,
                script_name TEXT,
                args TEXT,
                cwd TEXT
                ); """,
            """ CREATE TABLE IF NOT EXISTS info (
                id INTEGER PRIMARY KEY,
                uuid text NOT NULL,
                json_info JSON
                ); """,
            """ CREATE TABLE IF NOT EXISTS exifdata (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                json_exifdata JSON
                ); """,
            """ CREATE TABLE IF NOT EXISTS edited (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            """ CREATE TABLE IF NOT EXISTS converted (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            """ CREATE TABLE IF NOT EXISTS sidecar (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                sidecar_data TEXT,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            """ CREATE TABLE IF NOT EXISTS detected_text (
                id INTEGER PRIMARY KEY,
                uuid TEXT NOT NULL,
                text_data JSON
                ); """,
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_files_filepath_normalized on files (filepath_normalized); """,
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_info_uuid on info (uuid); """,
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_exifdata_filename on exifdata (filepath_normalized); """,
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_edited_filename on edited (filepath_normalized);""",
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_converted_filename on converted (filepath_normalized);""",
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_sidecar_filename on sidecar (filepath_normalized);""",
            """ CREATE UNIQUE INDEX IF NOT EXISTS idx_detected_text on detected_text (uuid);""",
        ]
        # create the tables if needed
        sql_commands = {
            "sql_version_table": """ CREATE TABLE IF NOT EXISTS version (
                id INTEGER PRIMARY KEY,
                osxphotos TEXT,
                exportdb TEXT
                ); """,
            "sql_about_table": """ CREATE TABLE IF NOT EXISTS about (
                id INTEGER PRIMARY KEY,
                about TEXT
                );""",
            "sql_files_table": """ CREATE TABLE IF NOT EXISTS files (
                id INTEGER PRIMARY KEY,
                filepath TEXT NOT NULL,
                filepath_normalized TEXT NOT NULL,
                uuid TEXT,
                orig_mode INTEGER,
                orig_size INTEGER,
                orig_mtime REAL,
                exif_mode INTEGER,
                exif_size INTEGER,
                exif_mtime REAL
                ); """,
            "sql_files_table_migrate": """ CREATE TABLE IF NOT EXISTS files_migrate (
                id INTEGER PRIMARY KEY,
                filepath TEXT NOT NULL,
                filepath_normalized TEXT NOT NULL,
                uuid TEXT,
                orig_mode INTEGER,
                orig_size INTEGER,
                orig_mtime REAL,
                exif_mode INTEGER,
                exif_size INTEGER,
                exif_mtime REAL,
                UNIQUE(filepath_normalized)
                ); """,
            "sql_files_migrate": """ INSERT INTO files_migrate SELECT * FROM files;""",
            "sql_files_drop_tables": """ DROP TABLE files;""",
            "sql_files_alter": """ ALTER TABLE files_migrate RENAME TO files;""",
            "sql_runs_table": """ CREATE TABLE IF NOT EXISTS runs (
                id INTEGER PRIMARY KEY,
                datetime TEXT,
                python_path TEXT,
                script_name TEXT,
                args TEXT,
                cwd TEXT
                ); """,
            "sql_info_table": """ CREATE TABLE IF NOT EXISTS info (
                id INTEGER PRIMARY KEY,
                uuid text NOT NULL,
                json_info JSON
                ); """,
            "sql_exifdata_table": """ CREATE TABLE IF NOT EXISTS exifdata (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                json_exifdata JSON
                ); """,
            "sql_edited_table": """ CREATE TABLE IF NOT EXISTS edited (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            "sql_converted_table": """ CREATE TABLE IF NOT EXISTS converted (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            "sql_sidecar_table": """ CREATE TABLE IF NOT EXISTS sidecar (
                id INTEGER PRIMARY KEY,
                filepath_normalized TEXT NOT NULL,
                sidecar_data TEXT,
                mode INTEGER,
                size INTEGER,
                mtime REAL
                ); """,
            "sql_detected_text_table": """ CREATE TABLE IF NOT EXISTS detected_text (
                id INTEGER PRIMARY KEY,
                uuid TEXT NOT NULL,
                text_data JSON
                ); """,
            "sql_files_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_files_filepath_normalized on files (filepath_normalized); """,
            "sql_info_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_info_uuid on info (uuid); """,
            "sql_exifdata_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_exifdata_filename on exifdata (filepath_normalized); """,
            "sql_edited_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_edited_filename on edited (filepath_normalized);""",
            "sql_converted_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_converted_filename on converted (filepath_normalized);""",
            "sql_sidecar_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_sidecar_filename on sidecar (filepath_normalized);""",
            "sql_detected_text_idx": """ CREATE UNIQUE INDEX IF NOT EXISTS idx_detected_text on detected_text (uuid);""",
        }
        try:
            c = conn.cursor()
            for cmd in sql_commands:
            for cmd in sql_commands.values():
                c.execute(cmd)
            c.execute(
                "INSERT INTO version(osxphotos, exportdb) VALUES (?, ?);",
@@ -823,19 +784,6 @@ class ExportDB(ExportDB_ABC):
        except Error as e:
            logging.warning(e)

        # perform needed migrations
        if version[1] < OSXPHOTOS_EXPORTDB_VERSION_MIGRATE_FILEPATH:
            self._migrate_normalized_filepath(conn)

        if version[1] < OSXPHOTOS_EXPORTDB_VERSION:
            try:
                c = conn.cursor()
                # add metadata column to files to support --force-update
                c.execute("ALTER TABLE files ADD COLUMN metadata TEXT;")
                conn.commit()
            except Error as e:
                logging.warning(e)
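The block removed above is the version-gated migration path: compare the stored schema version against the module constants and apply only the migrations that are missing. A condensed sketch of that pattern (the real code compares version strings directly, which works for these values since "4.3" < "5.0" lexically):

```python
import sqlite3

def apply_migrations(conn: sqlite3.Connection, exportdb_ver: str) -> None:
    # run only the migrations the database is missing
    if exportdb_ver < "4.3":
        ...  # _migrate_normalized_filepath: renormalize filepath_normalized
    if exportdb_ver < "5.0":
        # add metadata column to files to support --force-update
        conn.execute("ALTER TABLE files ADD COLUMN metadata TEXT;")
        conn.commit()
```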
    def __del__(self):
        """ensure the database connection is closed"""
        try:
@@ -849,7 +797,7 @@ class ExportDB(ExportDB_ABC):
        cmd = sys.argv[0]
        args = " ".join(sys.argv[1:]) if len(sys.argv) > 1 else ""
        cwd = os.getcwd()
        conn = self._conn
        conn = self.get_connection()
        try:
            c = conn.cursor()
            c.execute(
@@ -874,28 +822,6 @@ class ExportDB(ExportDB_ABC):
        """Fix all filepath_normalized columns for unicode normalization"""
        # Prior to database version 4.3, filepath_normalized was not normalized for unicode
        c = conn.cursor()
        migration_sql = [
            """ CREATE TABLE IF NOT EXISTS files_migrate (
                id INTEGER PRIMARY KEY,
                filepath TEXT NOT NULL,
                filepath_normalized TEXT NOT NULL,
                uuid TEXT,
                orig_mode INTEGER,
                orig_size INTEGER,
                orig_mtime REAL,
                exif_mode INTEGER,
                exif_size INTEGER,
                exif_mtime REAL,
                UNIQUE(filepath_normalized)
                ); """,
            """ INSERT INTO files_migrate SELECT * FROM files;""",
            """ DROP TABLE files;""",
            """ ALTER TABLE files_migrate RENAME TO files;""",
        ]
        for sql in migration_sql:
            c.execute(sql)
        conn.commit()

        for table in ["converted", "edited", "exifdata", "files", "sidecar"]:
            old_values = c.execute(
                f"SELECT filepath_normalized, id FROM {table}"
@@ -934,7 +860,7 @@ class ExportDBInMemory(ExportDB):
            conn = self._get_db_connection()
            if not conn:
                raise Exception("Error getting connection to in-memory database")
            self._create_or_migrate_db_tables(conn)
            self._create_db_tables(conn)
            self.was_created = True
            self.was_upgraded = ()
        else:
@@ -957,7 +883,7 @@ class ExportDBInMemory(ExportDB):
            self.was_created = False
            _, exportdb_ver = self._get_database_version(conn)
            if exportdb_ver < OSXPHOTOS_EXPORTDB_VERSION:
                self._create_or_migrate_db_tables(conn)
                self._create_db_tables(conn)
                self.was_upgraded = (exportdb_ver, OSXPHOTOS_EXPORTDB_VERSION)
            else:
                self.was_upgraded = ()
@@ -1,14 +1,17 @@
""" utility functions for validating/sanitizing path components """

import re

import pathvalidate

from ._constants import MAX_DIRNAME_LEN, MAX_FILENAME_LEN

__all__ = [
    "sanitize_filepath",
    "is_valid_filepath",
    "sanitize_filename",
    "sanitize_dirname",
    "sanitize_filename",
    "sanitize_filepath",
    "sanitize_filestem_with_count",
    "sanitize_pathpart",
]

@@ -53,6 +56,26 @@ def sanitize_filename(filename, replacement=":"):
    return filename


def sanitize_filestem_with_count(file_stem: str, file_suffix: str) -> str:
    """Sanitize a filestem that may end in (1), (2), etc. to ensure it + file_suffix doesn't exceed MAX_FILENAME_LEN"""
    filename_len = len(file_stem) + len(file_suffix)
    if filename_len <= MAX_FILENAME_LEN:
        return file_stem

    drop = filename_len - MAX_FILENAME_LEN
    match = re.match(r"(.*)(\(\d+\))$", file_stem)
    if not match:
        # filename doesn't end in (1), (2), etc.
        # truncate filename to MAX_FILENAME_LEN
        return file_stem[:-drop]

    # filename ends in (1), (2), etc.
    file_stem = match.group(1)
    file_count = match.group(2)
    file_stem = file_stem[:-drop]
    return f"{file_stem}{file_count}"
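A usage sketch for sanitize_filestem_with_count as defined above (file names are hypothetical; on this branch MAX_FILENAME_LEN is 255 - 6 to leave room for the .lock suffix):

```python
from osxphotos.path_utils import sanitize_filestem_with_count

MAX_LEN = 255 - 6                      # MAX_FILENAME_LEN on this branch
stem = "x" * 300 + " (2)"              # over-long stem ending in a count
new_stem = sanitize_filestem_with_count(stem, ".jpeg")
assert new_stem.endswith("(2)")        # the (2) count suffix is preserved
assert len(new_stem) + len(".jpeg") <= MAX_LEN
```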
def sanitize_dirname(dirname, replacement=":"):
    """replace any illegal characters in a directory name and truncate directory name if needed

@@ -82,9 +82,7 @@ class ExportOptions:
    exiftool: (bool, default = False): if True, will use exiftool to write metadata to export file
    export_as_hardlink: (bool, default=False): if True, will hardlink files instead of copying them
    export_db: (ExportDB_ABC): instance of a class that conforms to ExportDB_ABC with methods for getting/setting data related to exported files to compare update state
    face_regions: (bool, default=True): if True, will export face regions
    fileutil: (FileUtilABC): class that conforms to FileUtilABC with various file utilities
    force_update: (bool, default=False): if True, will export photo if any metadata has changed but export otherwise would not be triggered (e.g. metadata changed but not using exiftool)
    ignore_date_modified (bool): for use with sidecar and exiftool; if True, sets EXIF:ModifyDate to EXIF:DateTimeOriginal even if date_modified is set
    ignore_signature (bool, default=False): ignore file signature when used with update (look only at filename)
    increment (bool, default=True): if True, will increment file name until a non-existent name is found; if overwrite=False and increment=False, export will fail if destination file already exists
@@ -129,9 +127,7 @@ class ExportOptions:
    exiftool: bool = False
    export_as_hardlink: bool = False
    export_db: Optional[ExportDB_ABC] = None
    face_regions: bool = True
    fileutil: Optional[FileUtil] = None
    force_update: bool = False
    ignore_date_modified: bool = False
    ignore_signature: bool = False
    increment: bool = True
@@ -351,6 +347,34 @@ class ExportResults:
            + ")"
        )

    def asdict(self):
        """Return dict instance of class"""
        return {
            "exported": self.exported,
            "new": self.new,
            "updated": self.updated,
            "skipped": self.skipped,
            "exif_updated": self.exif_updated,
            "touched": self.touched,
            "to_touch": self.to_touch,
            "converted_to_jpeg": self.converted_to_jpeg,
            "sidecar_json_written": self.sidecar_json_written,
            "sidecar_json_skipped": self.sidecar_json_skipped,
            "sidecar_exiftool_written": self.sidecar_exiftool_written,
            "sidecar_exiftool_skipped": self.sidecar_exiftool_skipped,
            "sidecar_xmp_written": self.sidecar_xmp_written,
            "sidecar_xmp_skipped": self.sidecar_xmp_skipped,
            "missing": self.missing,
            "error": self.error,
            "exiftool_warning": self.exiftool_warning,
            "exiftool_error": self.exiftool_error,
            "deleted_files": self.deleted_files,
            "deleted_directories": self.deleted_directories,
            "exported_album": self.exported_album,
            "skipped_album": self.skipped_album,
            "missing_album": self.missing_album,
        }
class PhotoExporter:
    def __init__(self, photo: "PhotoInfo"):
@@ -453,101 +477,84 @@ class PhotoExporter:
                dest,
                options=options,
            )
        else:
            verbose(
                f"Skipping missing {'edited' if options.edited else 'original'} photo {self.photo.original_filename} ({self.photo.uuid})"
            )
            all_results.missing.append(dest)

        # copy live photo associated .mov if requested
        if export_original and options.live_photo and self.photo.live_photo:
        if (
            export_original
            and options.live_photo
            and self.photo.live_photo
            and staged_files.original_live
        ):
            live_name = dest.parent / f"{dest.stem}.mov"
            if staged_files.original_live:
                src_live = staged_files.original_live
                all_results += self._export_photo(
                    src_live,
                    live_name,
                    # don't try to convert the live photo
                    options=dataclasses.replace(options, convert_to_jpeg=False),
                )
            else:
                verbose(
                    f"Skipping missing live photo for {self.photo.original_filename} ({self.photo.uuid})"
                )
                all_results.missing.append(live_name)
            src_live = staged_files.original_live
            all_results += self._export_photo(
                src_live,
                live_name,
                # don't try to convert the live photo
                options=dataclasses.replace(options, convert_to_jpeg=False),
            )

        if export_edited and options.live_photo and self.photo.live_photo:
        if (
            export_edited
            and options.live_photo
            and self.photo.live_photo
            and staged_files.edited_live
        ):
            live_name = dest.parent / f"{dest.stem}.mov"
            if staged_files.edited_live:
                src_live = staged_files.edited_live
                all_results += self._export_photo(
                    src_live,
                    live_name,
                    # don't try to convert the live photo
                    options=dataclasses.replace(options, convert_to_jpeg=False),
                )
            else:
                verbose(
                    f"Skipping missing edited live photo for {self.photo.original_filename} ({self.photo.uuid})"
                )
                all_results.missing.append(live_name)
            src_live = staged_files.edited_live
            all_results += self._export_photo(
                src_live,
                live_name,
                # don't try to convert the live photo
                options=dataclasses.replace(options, convert_to_jpeg=False),
            )

        # copy associated RAW image if requested
        if options.raw_photo and self.photo.has_raw:
            if staged_files.raw:
                raw_path = pathlib.Path(staged_files.raw)
                raw_ext = raw_path.suffix
                raw_name = dest.parent / f"{dest.stem}{raw_ext}"
                all_results += self._export_photo(
                    raw_path,
                    raw_name,
                    options=options,
                )
            else:
                # guess at most likely raw name
                raw_ext = get_preferred_uti_extension(self.photo.uti_raw) or "raw"
                raw_name = dest.parent / f"{dest.stem}.{raw_ext}"
                all_results.missing.append(raw_name)
                verbose(
                    f"Skipping missing raw photo for {self.photo.original_filename} ({self.photo.uuid})"
                )
        if options.raw_photo and self.photo.has_raw and staged_files.raw:
            raw_path = pathlib.Path(staged_files.raw)
            raw_ext = raw_path.suffix
            raw_name = dest.parent / f"{dest.stem}{raw_ext}"
            all_results += self._export_photo(
                raw_path,
                raw_name,
                options=options,
            )

        # copy preview image if requested
        if options.preview:
            if staged_files.preview:
                # Photos keeps multiple different derivatives and path_derivatives returns list of them
                # first derivative is the largest so export that one
                preview_path = pathlib.Path(staged_files.preview)
                preview_ext = preview_path.suffix
                preview_name = (
                    dest.parent / f"{dest.stem}{options.preview_suffix}{preview_ext}"
                )
                # if original is missing, the filename won't have been incremented so
                # need to check here to make sure there aren't duplicate preview files in
                # the export directory
                preview_name = (
                    preview_name
                    if any([options.overwrite, options.update, options.force_update])
                    else pathlib.Path(increment_filename(preview_name))
                )
                all_results += self._export_photo(
                    preview_path,
                    preview_name,
                    options=options,
                )
            else:
                # don't know what actual preview suffix would be but most likely jpeg
                preview_name = dest.parent / f"{dest.stem}{options.preview_suffix}.jpeg"
                all_results.missing.append(preview_name)
                verbose(
                    f"Skipping missing preview photo for {self.photo.original_filename} ({self.photo.uuid})"
                )
        if options.preview and staged_files.preview:
            # Photos keeps multiple different derivatives and path_derivatives returns list of them
            # first derivative is the largest so export that one
            preview_path = pathlib.Path(staged_files.preview)
            preview_ext = preview_path.suffix
            preview_name = (
                dest.parent / f"{dest.stem}{options.preview_suffix}{preview_ext}"
            )
            # if original is missing, the filename won't have been incremented so
            # need to check here to make sure there aren't duplicate preview files in
            # the export directory
            preview_name = (
                preview_name
                if options.overwrite or options.update
                else pathlib.Path(increment_filename(preview_name, lock=True))
            )
            all_results += self._export_photo(
                preview_path,
                preview_name,
                options=options,
            )

        all_results += self._write_sidecar_files(dest=dest, options=options)

        if options.touch_file:
            all_results += self._touch_files(all_results, options)

        # if src was missing, there will be a lock file for dest that needs cleaning up
        try:
            lock_file = dest.parent / f".{dest.name}.lock"
            self.fileutil.unlink(lock_file)
        except Exception:
            pass

        return all_results
    def _touch_files(
@@ -593,7 +600,7 @@ class PhotoExporter:

        # if overwrite==False and increment==False, export should fail if file exists
        if dest.exists() and not any(
            [options.increment, options.update, options.force_update, options.overwrite]
            [options.increment, options.update, options.overwrite]
        ):
            raise FileExistsError(
                f"destination exists ({dest}); overwrite={options.overwrite}, increment={options.increment}"
@@ -605,13 +612,13 @@ class PhotoExporter:
        # e.g. exporting sidecar for file1.png and file1.jpeg
        # if file1.png exists and exporting file1.jpeg,
        # dest will be file1 (1).jpeg even though file1.jpeg doesn't exist to prevent sidecar collision
        if options.increment and not any(
            [options.update, options.force_update, options.overwrite]
        ):
            return pathlib.Path(increment_filename(dest))
        if options.increment and not options.update and not options.overwrite:
            return pathlib.Path(
                increment_filename(dest, lock=True, dry_run=options.dry_run)
            )

        # if update and file exists, need to check to see if it's the right file by checking export db
        if (options.update or options.force_update) and dest.exists() and src:
        if options.update and dest.exists() and src:
            export_db = options.export_db
            fileutil = options.fileutil
            # destination exists, check to see if destination is the right UUID
@@ -651,7 +658,9 @@ class PhotoExporter:
                    break
                else:
                    # increment the destination file
                    dest = pathlib.Path(increment_filename(dest))
                    dest = pathlib.Path(
                        increment_filename(dest, lock=True, dry_run=options.dry_run)
                    )

        # either dest was updated in the if clause above or not updated at all
        return dest
@@ -741,7 +750,7 @@ class PhotoExporter:
        # export live_photo .mov file?
        live_photo = bool(options.live_photo and self.photo.live_photo)

        overwrite = any([options.overwrite, options.update, options.force_update])
        overwrite = options.overwrite or options.update

        # figure out which photo version to request
        if options.edited or self.photo.shared:
@@ -845,11 +854,13 @@ class PhotoExporter:
            raise ValueError("Edited version requested but photo has no adjustments")

        dest = self._temp_dir_path / self.photo.original_filename
        dest = pathlib.Path(increment_filename(dest))
        dest = pathlib.Path(
            increment_filename(dest, lock=True, dry_run=options.dry_run)
        )

        # export live_photo .mov file?
        live_photo = bool(options.live_photo and self.photo.live_photo)
        overwrite = any([options.overwrite, options.update, options.force_update])
        overwrite = options.overwrite or options.update
        edited_version = options.edited or self.photo.shared
        # shared photos (in shared albums) show up as not having adjustments (not edited)
        # but Photos is unable to export the "original" as only a jpeg copy is shared in iCloud
@@ -945,7 +956,7 @@ class PhotoExporter:
        """Copies filepath to a temp file preserving access and modification times"""
        filepath = pathlib.Path(filepath)
        dest = self._temp_dir_path / filepath.name
        dest = increment_filename(dest)
        dest = increment_filename(dest, lock=True)
        self.fileutil.copy(filepath, dest)
        stat = os.stat(filepath)
        self.fileutil.utime(dest, (stat.st_atime, stat.st_mtime))
@@ -1001,16 +1012,18 @@ class PhotoExporter:
        fileutil = options.fileutil
        export_db = options.export_db

        if options.update or options.force_update:  # updating
        if options.update:  # updating
            cmp_touch, cmp_orig = False, False
            if dest_exists:
                # update, destination exists, but we might not need to replace it...
                if options.exiftool:
                if options.ignore_signature:
                    cmp_orig = True
                    cmp_touch = fileutil.cmp(
                        src, dest, mtime1=int(self.photo.date.timestamp())
                    )
                elif options.exiftool:
                    sig_exif = export_db.get_stat_exif_for_file(dest_str)
                    cmp_orig = fileutil.cmp_file_sig(dest_str, sig_exif)
                    if cmp_orig:
                        # if signatures match also need to compare exifdata to see if metadata changed
                        cmp_orig = not self._should_run_exiftool(dest_str, options)
                    sig_exif = (
                        sig_exif[0],
                        sig_exif[1],
@@ -1027,17 +1040,10 @@ class PhotoExporter:
                    )
                    cmp_touch = fileutil.cmp_file_sig(dest_str, sig_converted)
                else:
                    cmp_orig = options.ignore_signature or fileutil.cmp(src, dest)
                    cmp_orig = fileutil.cmp(src, dest)
                    cmp_touch = fileutil.cmp(
                        src, dest, mtime1=int(self.photo.date.timestamp())
                    )
                if options.force_update:
                    # need to also check the photo's metadata against that in the database
                    # and if anything changed, we need to update the file
                    # only the hex digest of the metadata is stored in the database
                    photo_digest = hexdigest(self.photo.json())
                    db_digest = export_db.get_metadata_for_file(dest_str)
                    cmp_orig = photo_digest == db_digest

            sig_cmp = cmp_touch if options.touch_file else cmp_orig

@@ -1051,7 +1057,7 @@ class PhotoExporter:
                if sig_edited != (None, None, None)
                else False
            )
            sig_cmp = sig_cmp and (options.force_update or cmp_edited)
            sig_cmp = sig_cmp and cmp_edited

        if (options.export_as_hardlink and dest.samefile(src)) or (
            not options.export_as_hardlink
@@ -1094,9 +1100,7 @@ class PhotoExporter:
            edited_stat = (
                fileutil.file_sig(src) if options.edited else (None, None, None)
            )
            if dest_exists and any(
                [options.overwrite, options.update, options.force_update]
            ):
            if dest_exists and (options.update or options.overwrite):
                # need to remove the destination first
                try:
                    fileutil.unlink(dest)
@@ -1117,7 +1121,9 @@ class PhotoExporter:
                # convert to a temp file before copying
                tmp_file = increment_filename(
                    self._temp_dir_path
                    / f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg"
                    / f"{pathlib.Path(src).stem}_converted_to_jpeg.jpeg",
                    lock=True,
                    dry_run=options.dry_run,
                )
                fileutil.convert_to_jpeg(
                    src, tmp_file, compression_quality=options.jpeg_quality
@@ -1139,19 +1145,29 @@ class PhotoExporter:
                f"Error copying file {src} to {dest_str}: {e} ({lineno(__file__)})"
            ) from e

        json_info = self.photo.json()
        # don't set the metadata digest if not force_update so that future use of force_update catches metadata change
        metadata_digest = hexdigest(json_info) if options.force_update else None
        export_db.set_data(
            filename=dest_str,
            uuid=self.photo.uuid,
            orig_stat=fileutil.file_sig(dest_str),
            converted_stat=converted_stat,
            edited_stat=edited_stat,
            info_json=json_info,
            metadata=metadata_digest,
            info_json=self.photo.json(),
        )

        # clean up lock files
        for file_ in set(
            converted_to_jpeg_files
            + exported_files
            + update_new_files
            + update_updated_files
        ):
            try:
                file_ = pathlib.Path(file_)
                lock_file = str(file_.parent / f".{file_.name}.lock")
                fileutil.unlink(lock_file)
            except Exception:
                pass

        return ExportResults(
            converted_to_jpeg=converted_to_jpeg_files,
            error=exif_results.error,
@@ -1249,13 +1265,10 @@ class PhotoExporter:
            sidecar_filename
        )
        write_sidecar = (
            not (options.update or options.force_update)
            not options.update
            or (options.update and not sidecar_filename.exists())
            or (
                (options.update or options.force_update)
                and not sidecar_filename.exists()
            )
            or (
                (options.update or options.force_update)
                options.update
                and (sidecar_digest != old_sidecar_digest)
                or not fileutil.cmp_file_sig(sidecar_filename, sidecar_sig)
            )
@@ -1323,7 +1336,27 @@ class PhotoExporter:
        # determine if we need to write the exif metadata
        # if we are not updating, we always write
        # else, need to check the database to determine if we need to write
        run_exiftool = self._should_run_exiftool(dest, options)
        run_exiftool = not options.update
        if options.update:
            files_are_different = False
            old_data = export_db.get_exifdata_for_file(dest)
            if old_data is not None:
                old_data = json.loads(old_data)[0]
                current_data = json.loads(self._exiftool_json_sidecar(options=options))[
                    0
                ]
                if old_data != current_data:
                    files_are_different = True

            if old_data is None or files_are_different:
                # didn't have old data, assume we need to write it
                # or files were different
                run_exiftool = True
            else:
                verbose(
                    f"Skipped up to date exiftool metadata for {pathlib.Path(dest).name}"
                )

        if run_exiftool:
            verbose(f"Writing metadata with exiftool for {pathlib.Path(dest).name}")
            if not options.dry_run:
@@ -1342,32 +1375,8 @@ class PhotoExporter:
            )
            exiftool_results.exif_updated.append(dest)
            exiftool_results.to_touch.append(dest)
        else:
            verbose(
                f"Skipped up to date exiftool metadata for {pathlib.Path(dest).name}"
            )
        return exiftool_results
    def _should_run_exiftool(self, dest, options: ExportOptions) -> bool:
        """Return True if exiftool should be run to update metadata"""
        run_exiftool = not (options.update or options.force_update)
        if options.update or options.force_update:
            files_are_different = False
            old_data = options.export_db.get_exifdata_for_file(dest)
            if old_data is not None:
                old_data = json.loads(old_data)[0]
                current_data = json.loads(self._exiftool_json_sidecar(options=options))[
                    0
                ]
                if old_data != current_data:
                    files_are_different = True

            if old_data is None or files_are_different:
                # didn't have old data, assume we need to write it
                # or files were different
                run_exiftool = True
        return run_exiftool

    def _write_exif_data(self, filepath: str, options: ExportOptions):
        """write exif data to image file at filepath

@@ -1541,9 +1550,6 @@ class PhotoExporter:
            person_list = sorted(list(set(person_list)))
            exif["XMP:PersonInImage"] = person_list.copy()

        if options.face_regions and self.photo.face_info and self.photo._db._beta:
            exif.update(self._get_mwg_face_regions_exiftool())

        # if self.favorite():
        #     exif["Rating"] = 5

@@ -1626,42 +1632,6 @@ class PhotoExporter:

        return exif

    def _get_mwg_face_regions_exiftool(self):
        """Return a dict with MWG face regions for use by exiftool"""
        if self.photo.orientation in [5, 6, 7, 8]:
            w = self.photo.height
            h = self.photo.width
        else:
            w = self.photo.width
            h = self.photo.height
        exif = {}
        exif["XMP:RegionAppliedToDimensionsW"] = w
        exif["XMP:RegionAppliedToDimensionsH"] = h
        exif["XMP:RegionAppliedToDimensionsUnit"] = "pixel"
        exif["XMP:RegionName"] = []
        exif["XMP:RegionType"] = []
        exif["XMP:RegionAreaX"] = []
        exif["XMP:RegionAreaY"] = []
        exif["XMP:RegionAreaW"] = []
        exif["XMP:RegionAreaH"] = []
        exif["XMP:RegionAreaUnit"] = []
        exif["XMP:RegionPersonDisplayName"] = []
        # exif["XMP:RegionRectangle"] = []
        for face in self.photo.face_info:
            if not face.name:
                continue
            area = face.mwg_rs_area
            exif["XMP:RegionName"].append(face.name)
            exif["XMP:RegionType"].append("Face")
            exif["XMP:RegionAreaX"].append(area.x)
            exif["XMP:RegionAreaY"].append(area.y)
            exif["XMP:RegionAreaW"].append(area.w)
            exif["XMP:RegionAreaH"].append(area.h)
            exif["XMP:RegionAreaUnit"].append("normalized")
            exif["XMP:RegionPersonDisplayName"].append(face.name)
            # exif["XMP:RegionRectangle"].append(f"{area.x},{area.y},{area.h},{area.w}")
        return exif

    def _get_exif_keywords(self):
        """returns list of keywords found in the file's exif metadata"""
        keywords = []

@@ -1728,11 +1728,7 @@ class PhotoInfo:
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()

        dict_data = self.asdict()
        for k, v in dict_data.items():
            if v and isinstance(v, (list, tuple)) and not isinstance(v[0], dict):
                dict_data[k] = sorted(v)
        return json.dumps(dict_data, sort_keys=True, default=default)
        return json.dumps(self.asdict(), sort_keys=True, default=default)
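The four lines removed here are what made the JSON output order-insensitive for list-valued fields; that stability matters because --force-update hashes this JSON to detect metadata changes. A small self-contained illustration of the idea (the hash function here is an assumption, not the one osxphotos uses):

```python
import hashlib
import json

def metadata_digest(d: dict) -> str:
    # sort list values (as the removed lines did) so equal metadata with
    # differently ordered keywords hashes identically
    d = {k: sorted(v) if isinstance(v, list) else v for k, v in d.items()}
    return hashlib.sha256(json.dumps(d, sort_keys=True).encode()).hexdigest()

a = {"keywords": ["beach", "sunset"], "title": "Vacation"}
b = {"keywords": ["sunset", "beach"], "title": "Vacation"}
assert metadata_digest(a) == metadata_digest(b)
```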
@@ -211,12 +211,10 @@ class SearchInfo:
        """return list of text for a specified category ID"""
        if self._db_searchinfo:
            content = "normalized_string" if self._normalized else "content_string"
            return sorted(
                [
                    rec[content]
                    for rec in self._db_searchinfo
                    if rec["category"] == category
                ]
            )
            return [
                rec[content]
                for rec in self._db_searchinfo
                if rec["category"] == category
            ]
        else:
            return []
@@ -103,8 +103,6 @@
% if photo.face_info:
<mwg-rs:Regions rdf:parseType="Resource">
    <mwg-rs:AppliedToDimensions rdf:parseType="Resource">
        <stDim:h>${photo.width if photo.orientation in [5, 6, 7, 8] else photo.height}</stDim:h>
        <stDim:w>${photo.height if photo.orientation in [5, 6, 7, 8] else photo.width}</stDim:w>
        <stDim:unit>pixel</stDim:unit>
    </mwg-rs:AppliedToDimensions>
    <mwg-rs:RegionList>
@@ -17,13 +17,14 @@ import sys
import unicodedata
import urllib.parse
from plistlib import load as plistload
from typing import Callable, List, Union, Optional
from typing import Callable, List, Optional, Union

import CoreFoundation
import objc
from Foundation import NSFileManager, NSPredicate, NSString

from ._constants import UNICODE_FORMAT
from .path_utils import sanitize_filestem_with_count

__all__ = [
    "dd_to_dms_str",
@@ -428,7 +429,10 @@ def normalize_unicode(value):


def increment_filename_with_count(
    filepath: Union[str, pathlib.Path], count: int = 0
    filepath: Union[str, pathlib.Path],
    count: int = 0,
    lock: bool = False,
    dry_run: bool = False,
) -> str:
    """Return filename (1).ext, etc if filename.ext exists

@@ -438,6 +442,8 @@ def increment_filename_with_count(
    Args:
        filepath: str or pathlib.Path; full path, including file name
        count: int; starting increment value
        lock: bool; if True, create a lock file in form .filename.lock to prevent other processes from using the same filename
        dry_run: bool; if True, don't actually create lock file

    Returns:
        tuple of new filepath (or same if not incremented), count
@@ -449,15 +455,32 @@ def increment_filename_with_count(
    dest_files = [f.stem.lower() for f in dest_files]
    dest_new = f"{dest.stem} ({count})" if count else dest.stem
    dest_new = normalize_fs_path(dest_new)
    dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
    if lock and not dry_run:
        dest_lock = "." + dest_new + dest.suffix + ".lock"
        dest_lock = dest.parent / dest_lock
    else:
        dest_lock = pathlib.Path("")

    while dest_new.lower() in dest_files:
    while dest_new.lower() in dest_files or (
        lock and not dry_run and dest_lock.exists()
    ):
        count += 1
        dest_new = normalize_fs_path(f"{dest.stem} ({count})")
        dest_new = sanitize_filestem_with_count(dest_new, dest.suffix)
        if lock:
            dest_lock = "." + dest_new + dest.suffix + ".lock"
            dest_lock = dest.parent / dest_lock
    if lock and not dry_run:
        dest_lock.touch()
    dest = dest.parent / f"{dest_new}{dest.suffix}"

    return normalize_fs_path(str(dest)), count
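Under the reading of the hunk above where the lock file is created for the final chosen name, two processes racing for the same destination end up with distinct names. An illustrative usage (hypothetical paths; assumes the directory exists and starts empty):

```python
from osxphotos.utils import increment_filename

first = increment_filename("/tmp/export/IMG_0001.jpeg", lock=True)
# first == "/tmp/export/IMG_0001.jpeg"; ".IMG_0001.jpeg.lock" is touched
second = increment_filename("/tmp/export/IMG_0001.jpeg", lock=True)
# the lock file exists, so second == "/tmp/export/IMG_0001 (1).jpeg"
assert first != second
# the caller must remove the .lock files afterward (see photoexporter.py)
```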
|
||||
|
||||
|
||||
def increment_filename(filepath: Union[str, pathlib.Path]) -> str:
|
||||
def increment_filename(
|
||||
filepath: Union[str, pathlib.Path], lock: bool = False, dry_run: bool = False
|
||||
) -> str:
|
||||
"""Return filename (1).ext, etc if filename.ext exists
|
||||
|
||||
If file exists in filename's parent folder with same stem as filename,
|
||||
@@ -465,13 +488,17 @@ def increment_filename(filepath: Union[str, pathlib.Path]) -> str:
|
||||
|
||||
Args:
|
||||
filepath: str or pathlib.Path; full path, including file name
|
||||
lock: bool; if True, creates a lock file in form .filename.lock to prevent other processes from using the same filename
|
||||
dry_run: bool; if True, don't actually create lock file
|
||||
|
||||
Returns:
|
||||
new filepath (or same if not incremented)
|
||||
|
||||
Note: This obviously is subject to race condition so using with caution.
|
||||
Note: This obviously is subject to race condition so using with caution but using lock=True reduces the risk of race condition (but lock files must be cleaned up)
|
||||
"""
|
||||
new_filepath, _ = increment_filename_with_count(filepath)
|
||||
new_filepath, _ = increment_filename_with_count(
|
||||
filepath, lock=lock, dry_run=dry_run
|
||||
)
|
||||
return new_filepath
|
||||
|
||||
|
||||
|
||||
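This lock-file scheme is what lets several export worker processes pick unique destination names without coordinating through a shared database: a process that claims "photo (1).jpg" touches ".photo (1).jpg.lock" in the destination folder, and any other process probing the same stem sees the lock and increments past it. A minimal standalone sketch of the same pattern, assuming plain pathlib and not the osxphotos implementation (the function name claim_filename is hypothetical):

    import pathlib

    def claim_filename(filepath: str) -> pathlib.Path:
        """Reserve a unique 'name (n).ext' in filepath's folder via a lock file."""
        dest = pathlib.Path(filepath)
        candidate = dest
        count = 0
        while True:
            # same ".<name>.lock" form as the diff above
            lockfile = candidate.parent / f".{candidate.name}.lock"
            if not candidate.exists() and not lockfile.exists():
                lockfile.touch()  # claim the name; caller removes the lock when done
                return candidate
            count += 1
            candidate = dest.parent / f"{dest.stem} ({count}){dest.suffix}"

Note that Path.touch() is not an atomic test-and-set (touch(exist_ok=False) would raise on collision instead of silently succeeding), so, as the updated docstring says, this reduces rather than eliminates the race.
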
8
setup.py
@@ -74,12 +74,11 @@ setup(
         "Topic :: Software Development :: Libraries :: Python Modules",
     ],
     install_requires=[
-        "Click>=8.0.1,<9.0",
-        "Mako>=1.1.4,<1.2.0",
-        "PyYAML>=5.4.1,<5.5.0",
         "bitmath>=1.3.3.1,<1.4.0.0",
         "bpylist2==3.0.2",
+        "Click>=8.0.1,<9.0",
         "dataclasses==0.7;python_version<'3.7'",
+        "Mako>=1.1.4,<1.2.0",
         "more-itertools>=8.8.0,<9.0.0",
         "objexplore>=1.5.5,<1.6.0",
         "osxmetadata>=0.99.34,<1.0.0",

@@ -87,15 +86,16 @@ setup(
         "photoscript>=0.1.4,<0.2.0",
         "ptpython>=3.0.20,<4.0.0",
         "pyobjc-core>=7.3,<9.0",
-        "pyobjc-framework-AVFoundation>=7.3,<9.0",
         "pyobjc-framework-AppleScriptKit>=7.3,<9.0",
         "pyobjc-framework-AppleScriptObjC>=7.3,<9.0",
+        "pyobjc-framework-AVFoundation>=7.3,<9.0",
         "pyobjc-framework-Cocoa>=7.3,<9.0",
         "pyobjc-framework-CoreServices>=7.2,<9.0",
         "pyobjc-framework-Metal>=7.3,<9.0",
         "pyobjc-framework-Photos>=7.3,<9.0",
         "pyobjc-framework-Quartz>=7.3,<9.0",
         "pyobjc-framework-Vision>=7.3,<9.0",
+        "PyYAML>=5.4.1,<5.5.0",
         "rich>=10.6.0,<=11.0.0",
         "textx>=2.3.0,<3.0.0",
         "toml>=0.10.2,<0.11.0",

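No version pins change in either hunk; the churn is just a re-sort, with the head branch listing install_requires case-insensitively (bitmath, bpylist2, Click, dataclasses, Mako, ...), which is why Click, Mako, and PyYAML move rather than update. A one-line sketch of that ordering (plain Python, my illustration, not repository code):

    deps = ["Click>=8.0.1,<9.0", "PyYAML>=5.4.1,<5.5.0", "bitmath>=1.3.3.1,<1.4.0.0"]
    # case-insensitive sort interleaves upper- and lower-case names
    print(sorted(deps, key=str.casefold))  # bitmath..., Click..., PyYAML...
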
@@ -40,7 +40,7 @@ else:

 @pytest.fixture(autouse=True)
 def reset_singletons():
-    """Need to clean up any ExifTool singletons between tests"""
+    """ Need to clean up any ExifTool singletons between tests """
     _ExifToolProc.instance = None


@@ -73,7 +73,7 @@ def pytest_collection_modifyitems(config, items):


 def copy_photos_library(photos_library=TEST_LIBRARY, delay=0):
-    """copy the test library and open Photos, returns path to copied library"""
+    """ copy the test library and open Photos, returns path to copied library """
     script = AppleScript(
         """
         tell application "Photos"

@@ -118,9 +118,3 @@ def copy_photos_library(photos_library=TEST_LIBRARY, delay=0):
 @pytest.fixture
 def addalbum_library():
     copy_photos_library(delay=10)
-
-
-def copy_photos_library_to_path(photos_library_path: str, dest_path: str) -> str:
-    """Copy a photos library to a folder"""
-    ditto(photos_library_path, dest_path)
-    return dest_path

File diff suppressed because one or more lines are too long
@@ -1,12 +1,10 @@
-""" Test the command line interface (CLI) """
+r""" Test the command line interface (CLI) """

 import os
-import sqlite3
 import tempfile

 import pytest
 from click.testing import CliRunner
-from conftest import copy_photos_library_to_path

 import osxphotos
 from osxphotos.exiftool import get_exiftool_path

@@ -1448,6 +1446,7 @@ def test_query_exif_case_insensitive(exiftag, exifvalue, uuid_expected):


 def test_export():
     """Test basic export"""
     import glob
     import os
     import os.path

@@ -1464,6 +1463,24 @@ def test_export():
         files = glob.glob("*")
         assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)

+def test_export_multiprocess():
+    """Test basic export with --multiprocess"""
+    import glob
+    import os
+    import os.path
+
+    import osxphotos
+    from osxphotos.cli import export
+
+    runner = CliRunner()
+    cwd = os.getcwd()
+    # pylint: disable=not-context-manager
+    with runner.isolated_filesystem():
+        result = runner.invoke(export, [os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V", "--multiprocess", "2"])
+        assert result.exit_code == 0
+        files = glob.glob("*")
+        assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)
+

 def test_export_uuid_from_file():
     """Test export with --uuid-from-file"""

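The new test exercises the branch's parallel export by passing "--multiprocess 2" to the export command and asserting the same set of files lands on disk as a single-process run. One plausible way such an option is declared with Click (a sketch under my own assumptions, not the branch's actual source; the parameter name processes is hypothetical):

    import click

    @click.command()
    @click.option(
        "-M",
        "--multiprocess",
        "processes",                    # hypothetical destination name
        metavar="NUMBER_OF_PROCESSES",
        type=click.IntRange(min=1),     # rejects zero and negative worker counts
        default=1,
        help="Run export in parallel using NUMBER_OF_PROCESSES processes.",
    )
    def export(processes):
        click.echo(f"exporting with {processes} worker process(es)")

    if __name__ == "__main__":
        export()
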
@@ -1973,89 +1990,6 @@ def test_export_exiftool():
             assert exif[key] == CLI_EXIFTOOL[uuid][key]


-@pytest.mark.skipif(exiftool is None, reason="exiftool not installed")
-def test_export_exiftool_template_change():
-    """Test --exiftool when template changes with --update, #630"""
-    import glob
-    import os
-    import os.path
-
-    from osxphotos.cli import export
-    from osxphotos.exiftool import ExifTool
-
-    runner = CliRunner()
-    cwd = os.getcwd()
-    # pylint: disable=not-context-manager
-    with runner.isolated_filesystem():
-        for uuid in CLI_EXIFTOOL:
-            # export with --exiftool
-            result = runner.invoke(
-                export,
-                [
-                    os.path.join(cwd, PHOTOS_DB_15_7),
-                    ".",
-                    "-V",
-                    "--exiftool",
-                    "--uuid",
-                    f"{uuid}",
-                ],
-            )
-            assert result.exit_code == 0
-
-            # export with --update, should be no change
-            result = runner.invoke(
-                export,
-                [
-                    os.path.join(cwd, PHOTOS_DB_15_7),
-                    ".",
-                    "-V",
-                    "--exiftool",
-                    "--update",
-                    "--uuid",
-                    f"{uuid}",
-                ],
-            )
-            assert result.exit_code == 0
-            assert "exported: 0" in result.output
-
-            # export with --update and template change, should export
-            result = runner.invoke(
-                export,
-                [
-                    os.path.join(cwd, PHOTOS_DB_15_7),
-                    ".",
-                    "-V",
-                    "--exiftool",
-                    "--keyword-template",
-                    "FOO",
-                    "--update",
-                    "--uuid",
-                    f"{uuid}",
-                ],
-            )
-            assert result.exit_code == 0
-            assert "updated EXIF data: 1" in result.output
-
-            # export with --update, nothing should export
-            result = runner.invoke(
-                export,
-                [
-                    os.path.join(cwd, PHOTOS_DB_15_7),
-                    ".",
-                    "-V",
-                    "--exiftool",
-                    "--keyword-template",
-                    "FOO",
-                    "--update",
-                    "--uuid",
-                    f"{uuid}",
-                ],
-            )
-            assert result.exit_code == 0
-            assert "exported: 0" in result.output
-            assert "updated EXIF data: 0" in result.output
-
-
 @pytest.mark.skipif(exiftool is None, reason="exiftool not installed")
 def test_export_exiftool_path():
     """test --exiftool with --exiftool-path"""

@@ -4176,8 +4110,7 @@ def test_export_filename_template_long_description():
         ],
     )
     assert result.exit_code == 0
-    for fname in CLI_EXPORTED_FILENAME_TEMPLATE_LONG_DESCRIPTION:
-        assert pathlib.Path(fname).is_file()
+    assert "exported: 1" in result.output


 def test_export_filename_template_3():

@@ -4806,96 +4739,11 @@ def test_export_update_basic():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )


-def test_export_force_update():
-    """test export with --force-update"""
-    import glob
-    import os
-    import os.path
-
-    import osxphotos
-    from osxphotos.cli import OSXPHOTOS_EXPORT_DB, export
-
-    runner = CliRunner()
-    cwd = os.getcwd()
-    # pylint: disable=not-context-manager
-    with runner.isolated_filesystem():
-        # basic export
-        result = runner.invoke(export, [os.path.join(cwd, CLI_PHOTOS_DB), ".", "-V"])
-        assert result.exit_code == 0
-        files = glob.glob("*")
-        assert sorted(files) == sorted(CLI_EXPORT_FILENAMES)
-        assert os.path.isfile(OSXPHOTOS_EXPORT_DB)
-
-        src = os.path.join(cwd, CLI_PHOTOS_DB)
-        dest = os.path.join(os.getcwd(), "export_force_update.photoslibrary")
-        photos_db_path = copy_photos_library_to_path(src, dest)
-
-        # update
-        result = runner.invoke(
-            export, [os.path.join(cwd, photos_db_path), ".", "--update"]
-        )
-        assert result.exit_code == 0
-        assert (
-            f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 3, error: 0"
-            in result.output
-        )
-
-        # force update must be run once to set the metadata digest info
-        # in practice, this means that first time user uses --force-update, most files will likely be re-exported
-        result = runner.invoke(
-            export, [os.path.join(cwd, photos_db_path), ".", "--force-update"]
-        )
-        assert result.exit_code == 0
-
-        # update a file
-        dbpath = os.path.join(photos_db_path, "database/Photos.sqlite")
-        try:
-            conn = sqlite3.connect(dbpath)
-            c = conn.cursor()
-        except sqlite3.Error as e:
-            pytest.exit(f"An error occurred opening sqlite file")
-
-        # photo is IMG_4547.jpg
-        c.execute(
-            "UPDATE ZADDITIONALASSETATTRIBUTES SET Z_OPT=9, ZTITLE='My Updated Title' WHERE Z_PK=8;"
-        )
-        conn.commit()
-
-        # run --force-update to see if updated metadata forced update
-        result = runner.invoke(
-            export, [os.path.join(cwd, photos_db_path), ".", "--force-update"]
-        )
-        assert result.exit_code == 0
-        assert (
-            f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 1, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7-1}, updated EXIF data: 0, missing: 3, error: 0"
-            in result.output
-        )
-
-        # update, nothing should export
-        result = runner.invoke(
-            export, [os.path.join(cwd, photos_db_path), ".", "--update"]
-        )
-        assert result.exit_code == 0
-        assert (
-            f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 3, error: 0"
-            in result.output
-        )
-
-        # run --force-update, nothing should export
-        result = runner.invoke(
-            export, [os.path.join(cwd, photos_db_path), ".", "--force-update"]
-        )
-        assert result.exit_code == 0
-        assert (
-            f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 3, error: 0"
-            in result.output
-        )
-
-
 @pytest.mark.skipif(
     "OSXPHOTOS_TEST_EXPORT" not in os.environ,
     reason="Skip if not running on author's personal library.",

@@ -5008,7 +4856,7 @@ def test_export_update_exiftool():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 3, error: 1"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 2, error: 1"
         in result.output
     )

@@ -5018,7 +4866,7 @@ def test_export_update_exiftool():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: 0, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )

@@ -5055,7 +4903,7 @@ def test_export_update_hardlink():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )
     assert not os.path.samefile(CLI_EXPORT_UUID_FILENAME, photo.path)

@@ -5094,7 +4942,7 @@ def test_export_update_hardlink_exiftool():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 3, error: 1"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, skipped: 0, updated EXIF data: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 2, error: 1"
         in result.output
     )
     assert not os.path.samefile(CLI_EXPORT_UUID_FILENAME, photo.path)

@@ -5132,7 +4980,7 @@ def test_export_update_edits():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 1, updated: 1, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7-2}, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 1, updated: 1, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7-2}, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )

@@ -5230,7 +5078,7 @@ def test_export_update_no_db():
     # edited files will be re-exported because there won't be an edited signature
     # in the database
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_EDITED_15_7}, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7}, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 0, updated: {PHOTOS_EDITED_15_7}, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7}, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )
     assert os.path.isfile(OSXPHOTOS_EXPORT_DB)

@@ -5270,7 +5118,7 @@ def test_export_then_hardlink():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 2, error: 0"
         in result.output
     )
     assert os.path.samefile(CLI_EXPORT_UUID_FILENAME, photo.path)

@@ -5295,11 +5143,11 @@ def test_export_dry_run():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7}, missing: 2, error: 0"
         in result.output
     )
     for filepath in CLI_EXPORT_FILENAMES_DRY_RUN:
-        assert re.search(r"Exported.*" + f"{re.escape(filepath)}", result.output)
+        assert re.search(r"Exported.*" + f"{re.escape(normalize_fs_path(filepath))}", result.output)
         assert not os.path.isfile(normalize_fs_path(filepath))

@@ -5341,7 +5189,7 @@ def test_export_update_edits_dry_run():
     )
     assert result.exit_code == 0
     assert (
-        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 1, updated: 1, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7-2}, updated EXIF data: 0, missing: 3, error: 0"
+        f"Processed: {PHOTOS_NOT_IN_TRASH_LEN_15_7} photos, exported: 1, updated: 1, skipped: {PHOTOS_NOT_IN_TRASH_LEN_15_7+PHOTOS_EDITED_15_7-2}, updated EXIF data: 0, missing: 2, error: 0"
         in result.output
     )

@@ -140,7 +140,6 @@ def test_export_edited_exiftool(photosdb):
     got_dest = photos[0].export(
         dest, use_photos_export=True, edited=True, exiftool=True
     )
-    logging.warning(got_dest)
     got_dest = got_dest[0]

     assert os.path.isfile(got_dest)

@@ -7,7 +7,6 @@ import pytest

 EXIF_DATA = """[{"_CreatedBy": "osxphotos, https://github.com/RhetTbull/osxphotos", "EXIF:ImageDescription": "\u2068Elder Park\u2069, \u2068Adelaide\u2069, \u2068Australia\u2069", "XMP:Description": "\u2068Elder Park\u2069, \u2068Adelaide\u2069, \u2068Australia\u2069", "XMP:Title": "Elder Park", "EXIF:GPSLatitude": "34 deg 55' 8.01\" S", "EXIF:GPSLongitude": "138 deg 35' 48.70\" E", "Composite:GPSPosition": "34 deg 55' 8.01\" S, 138 deg 35' 48.70\" E", "EXIF:GPSLatitudeRef": "South", "EXIF:GPSLongitudeRef": "East", "EXIF:DateTimeOriginal": "2017:06:20 17:18:56", "EXIF:OffsetTimeOriginal": "+09:30", "EXIF:ModifyDate": "2020:05:18 14:42:04"}]"""
 INFO_DATA = """{"uuid": "3DD2C897-F19E-4CA6-8C22-B027D5A71907", "filename": "3DD2C897-F19E-4CA6-8C22-B027D5A71907.jpeg", "original_filename": "IMG_4547.jpg", "date": "2017-06-20T17:18:56.518000+09:30", "description": "\u2068Elder Park\u2069, \u2068Adelaide\u2069, \u2068Australia\u2069", "title": "Elder Park", "keywords": [], "labels": ["Statue", "Art"], "albums": ["AlbumInFolder"], "folders": {"AlbumInFolder": ["Folder1", "SubFolder2"]}, "persons": [], "path": "/Users/rhet/Pictures/Test-10.15.4.photoslibrary/originals/3/3DD2C897-F19E-4CA6-8C22-B027D5A71907.jpeg", "ismissing": false, "hasadjustments": true, "external_edit": false, "favorite": false, "hidden": false, "latitude": -34.91889167000001, "longitude": 138.59686167, "path_edited": "/Users/rhet/Pictures/Test-10.15.4.photoslibrary/resources/renders/3/3DD2C897-F19E-4CA6-8C22-B027D5A71907_1_201_a.jpeg", "shared": false, "isphoto": true, "ismovie": false, "uti": "public.jpeg", "burst": false, "live_photo": false, "path_live_photo": null, "iscloudasset": false, "incloud": null, "date_modified": "2020-05-18T14:42:04.608664+09:30", "portrait": false, "screenshot": false, "slow_mo": false, "time_lapse": false, "hdr": false, "selfie": false, "panorama": false, "has_raw": false, "uti_raw": null, "path_raw": null, "place": {"name": "Elder Park, Adelaide, South Australia, Australia, River Torrens", "names": {"field0": [], "country": ["Australia"], "state_province": ["South Australia"], "sub_administrative_area": ["Adelaide"], "city": ["Adelaide", "Adelaide"], "field5": [], "additional_city_info": ["Adelaide CBD", "Tarndanya"], "ocean": [], "area_of_interest": ["Elder Park", ""], "inland_water": ["River Torrens", "River Torrens"], "field10": [], "region": [], "sub_throughfare": [], "field13": [], "postal_code": [], "field15": [], "field16": [], "street_address": [], "body_of_water": ["River Torrens", "River Torrens"]}, "country_code": "AU", "ishome": false, "address_str": "River Torrens, Adelaide SA, Australia", "address": {"street": null, "sub_locality": "Tarndanya", "city": "Adelaide", "sub_administrative_area": "Adelaide", "state_province": "SA", "postal_code": null, "country": "Australia", "iso_country_code": "AU"}}, "exif": {"flash_fired": false, "iso": 320, "metering_mode": 3, "sample_rate": null, "track_format": null, "white_balance": 0, "aperture": 2.2, "bit_rate": null, "duration": null, "exposure_bias": 0.0, "focal_length": 4.15, "fps": null, "latitude": null, "longitude": null, "shutter_speed": 0.058823529411764705, "camera_make": "Apple", "camera_model": "iPhone 6s", "codec": null, "lens_model": "iPhone 6s back camera 4.15mm f/2.2"}}"""
 SIDECAR_DATA = """FOO_BAR"""
-METADATA_DATA = "FIZZ"

 EXIF_DATA2 = """[{"_CreatedBy": "osxphotos, https://github.com/RhetTbull/osxphotos", "XMP:Title": "St. James's Park", "XMP:TagsList": ["London 2018", "St. James's Park", "England", "United Kingdom", "UK", "London"], "IPTC:Keywords": ["London 2018", "St. James's Park", "England", "United Kingdom", "UK", "London"], "XMP:Subject": ["London 2018", "St. James's Park", "England", "United Kingdom", "UK", "London"], "EXIF:GPSLatitude": "51 deg 30' 12.86\" N", "EXIF:GPSLongitude": "0 deg 7' 54.50\" W", "Composite:GPSPosition": "51 deg 30' 12.86\" N, 0 deg 7' 54.50\" W", "EXIF:GPSLatitudeRef": "North", "EXIF:GPSLongitudeRef": "West", "EXIF:DateTimeOriginal": "2018:10:13 09:18:12", "EXIF:OffsetTimeOriginal": "-04:00", "EXIF:ModifyDate": "2019:12:08 14:06:44"}]"""
 INFO_DATA2 = """{"uuid": "F2BB3F98-90F0-4E4C-A09B-25C6822A4529", "filename": "F2BB3F98-90F0-4E4C-A09B-25C6822A4529.jpeg", "original_filename": "IMG_8440.JPG", "date": "2019-06-11T11:42:06.711805-07:00", "description": null, "title": null, "keywords": [], "labels": ["Sky", "Cloudy", "Fence", "Land", "Outdoor", "Park", "Amusement Park", "Roller Coaster"], "albums": [], "folders": {}, "persons": [], "path": "/Volumes/MacBook Catalina - Data/Users/rhet/Pictures/Photos Library.photoslibrary/originals/F/F2BB3F98-90F0-4E4C-A09B-25C6822A4529.jpeg", "ismissing": false, "hasadjustments": false, "external_edit": false, "favorite": false, "hidden": false, "latitude": 33.81558666666667, "longitude": -117.99298, "path_edited": null, "shared": false, "isphoto": true, "ismovie": false, "uti": "public.jpeg", "burst": false, "live_photo": false, "path_live_photo": null, "iscloudasset": true, "incloud": true, "date_modified": "2019-10-14T00:51:47.141950-07:00", "portrait": false, "screenshot": false, "slow_mo": false, "time_lapse": false, "hdr": false, "selfie": false, "panorama": false, "has_raw": false, "uti_raw": null, "path_raw": null, "place": {"name": "Adventure City, Stanton, California, United States", "names": {"field0": [], "country": ["United States"], "state_province": ["California"], "sub_administrative_area": ["Orange"], "city": ["Stanton", "Anaheim", "Anaheim"], "field5": [], "additional_city_info": ["West Anaheim"], "ocean": [], "area_of_interest": ["Adventure City", "Adventure City"], "inland_water": [], "field10": [], "region": [], "sub_throughfare": [], "field13": [], "postal_code": [], "field15": [], "field16": [], "street_address": [], "body_of_water": []}, "country_code": "US", "ishome": false, "address_str": "Adventure City, 1240 S Beach Blvd, Anaheim, CA 92804, United States", "address": {"street": "1240 S Beach Blvd", "sub_locality": "West Anaheim", "city": "Stanton", "sub_administrative_area": "Orange", "state_province": "CA", "postal_code": "92804", "country": "United States", "iso_country_code": "US"}}, "exif": {"flash_fired": false, "iso": 25, "metering_mode": 5, "sample_rate": null, "track_format": null, "white_balance": 0, "aperture": 2.2, "bit_rate": null, "duration": null, "exposure_bias": 0.0, "focal_length": 4.15, "fps": null, "latitude": null, "longitude": null, "shutter_speed": 0.0004940711462450593, "camera_make": "Apple", "camera_model": "iPhone 6s", "codec": null, "lens_model": "iPhone 6s back camera 4.15mm f/2.2"}}"""

@@ -65,7 +64,6 @@ def test_export_db():
         (10, 11, 12),
         INFO_DATA,
         EXIF_DATA,
-        METADATA_DATA,
     )
     assert db.get_uuid_for_file(filepath2) == "BAR-FOO"
     assert db.get_info_for_uuid("BAR-FOO") == INFO_DATA

@@ -75,7 +73,6 @@ def test_export_db():
     assert db.get_stat_converted_for_file(filepath2) == (7, 8, 9)
     assert db.get_stat_edited_for_file(filepath2) == (10, 11, 12)
     assert sorted(db.get_previous_uuids()) == (["BAR-FOO", "FOO-BAR"])
-    assert db.get_metadata_for_file(filepath2) == METADATA_DATA

     # test set_data value=None doesn't overwrite existing data
     db.set_data(

@@ -87,7 +84,6 @@
         None,
         None,
         None,
-        None,
     )
     assert db.get_uuid_for_file(filepath2) == "BAR-FOO"
     assert db.get_info_for_uuid("BAR-FOO") == INFO_DATA

@@ -97,7 +93,6 @@ def test_export_db():
     assert db.get_stat_converted_for_file(filepath2) == (7, 8, 9)
     assert db.get_stat_edited_for_file(filepath2) == (10, 11, 12)
     assert sorted(db.get_previous_uuids()) == (["BAR-FOO", "FOO-BAR"])
-    assert db.get_metadata_for_file(filepath2) == METADATA_DATA

     # close and re-open
     db.close()

@@ -112,8 +107,6 @@ def test_export_db():
     assert db.get_stat_edited_for_file(filepath2) == (10, 11, 12)
     assert sorted(db.get_previous_uuids()) == (["BAR-FOO", "FOO-BAR"])
     assert json.loads(db.get_detected_text_for_uuid("FOO-BAR")) == [["foo", 0.5]]
-    assert db.get_metadata_for_file(filepath2) == METADATA_DATA
-
     # update data
     db.set_uuid_for_file(filepath, "FUBAR")

@@ -155,10 +148,9 @@ def test_export_db_no_op():
     db.set_sidecar_for_file(filepath, SIDECAR_DATA, (13, 14, 15))
     assert db.get_sidecar_for_file(filepath) == (None, (None, None, None))
     assert db.get_previous_uuids() == []

     db.set_detected_text_for_uuid("FOO-BAR", json.dumps([["foo", 0.5]]))
     assert db.get_detected_text_for_uuid("FOO-BAR") is None
-    db.set_metadata_for_file(filepath, METADATA_DATA)
-    assert db.get_metadata_for_file(filepath) is None

     # test set_data which sets all at the same time
     filepath2 = os.path.join(tempdir.name, "test2.jpg")

@@ -171,7 +163,6 @@ def test_export_db_no_op():
         (10, 11, 12),
         INFO_DATA,
         EXIF_DATA,
-        METADATA_DATA,
     )
     assert db.get_uuid_for_file(filepath2) is None
     assert db.get_info_for_uuid("BAR-FOO") is None

@@ -181,7 +172,6 @@ def test_export_db_no_op():
     assert db.get_stat_converted_for_file(filepath) is None
     assert db.get_stat_edited_for_file(filepath) is None
     assert db.get_previous_uuids() == []
-    assert db.get_metadata_for_file(filepath) is None

     # update data
     db.set_uuid_for_file(filepath, "FUBAR")

@@ -217,7 +207,7 @@ def test_export_db_in_memory():
     db.set_sidecar_for_file(filepath, SIDECAR_DATA, (13, 14, 15))
     assert db.get_previous_uuids() == ["FOO-BAR"]
     db.set_detected_text_for_uuid("FOO-BAR", json.dumps([["foo", 0.5]]))
-    db.set_metadata_for_file(filepath, METADATA_DATA)

     db.close()

     dbram = ExportDBInMemory(dbname, tempdir.name)

@@ -236,7 +226,6 @@ def test_export_db_in_memory():
     assert dbram.get_sidecar_for_file(filepath) == (SIDECAR_DATA, (13, 14, 15))
     assert dbram.get_previous_uuids() == ["FOO-BAR"]
     assert json.loads(dbram.get_detected_text_for_uuid("FOO-BAR")) == [["foo", 0.5]]
-    assert dbram.get_metadata_for_file(filepath) == METADATA_DATA

     # change a value
     dbram.set_uuid_for_file(filepath, "FUBAR")

@@ -248,7 +237,6 @@ def test_export_db_in_memory():
     dbram.set_stat_edited_for_file(filepath, (4, 5, 6))
     dbram.set_sidecar_for_file(filepath, "FUBAR", (20, 21, 22))
     dbram.set_detected_text_for_uuid("FUBAR", json.dumps([["bar", 0.5]]))
-    dbram.set_metadata_for_file(filepath, "FUBAR")

     assert dbram.get_uuid_for_file(filepath_lower) == "FUBAR"
     assert dbram.get_info_for_uuid("FUBAR") == INFO_DATA2

@@ -260,7 +248,6 @@ def test_export_db_in_memory():
     assert dbram.get_sidecar_for_file(filepath) == ("FUBAR", (20, 21, 22))
     assert dbram.get_previous_uuids() == ["FUBAR"]
     assert json.loads(dbram.get_detected_text_for_uuid("FUBAR")) == [["bar", 0.5]]
-    assert dbram.get_metadata_for_file(filepath) == "FUBAR"

     dbram.close()

@@ -278,7 +265,6 @@ def test_export_db_in_memory():

     assert db.get_info_for_uuid("FUBAR") is None
     assert db.get_detected_text_for_uuid("FUBAR") is None
-    assert db.get_metadata_for_file(filepath) == METADATA_DATA


 def test_export_db_in_memory_nofile():

@@ -309,7 +295,6 @@ def test_export_db_in_memory_nofile():
     dbram.set_stat_edited_for_file(filepath, (4, 5, 6))
     dbram.set_sidecar_for_file(filepath, "FUBAR", (20, 21, 22))
     dbram.set_detected_text_for_uuid("FUBAR", json.dumps([["bar", 0.5]]))
-    dbram.set_metadata_for_file(filepath, METADATA_DATA)

     assert dbram.get_uuid_for_file(filepath_lower) == "FUBAR"
     assert dbram.get_info_for_uuid("FUBAR") == INFO_DATA2

@@ -321,6 +306,5 @@ def test_export_db_in_memory_nofile():
     assert dbram.get_sidecar_for_file(filepath) == ("FUBAR", (20, 21, 22))
     assert dbram.get_previous_uuids() == ["FUBAR"]
     assert json.loads(dbram.get_detected_text_for_uuid("FUBAR")) == [["bar", 0.5]]
-    assert dbram.get_metadata_for_file(filepath) == METADATA_DATA

     dbram.close()

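The tests above exercise ExportDBInMemory (constructed here as ExportDBInMemory(dbname, tempdir.name)), which gives a consumer its own in-memory working copy of the on-disk export database; that kind of isolation is plausibly what a multiprocess export needs, since concurrent writers to one SQLite file contend for locks. The standard-library way to load a file-backed SQLite database into memory is the sqlite3 backup API; a minimal sketch of the general technique, not the osxphotos implementation:

    import sqlite3

    def load_into_memory(db_path: str) -> sqlite3.Connection:
        """Copy a file-backed SQLite database into a private in-memory copy."""
        disk = sqlite3.connect(db_path)
        mem = sqlite3.connect(":memory:")
        disk.backup(mem)  # Python 3.7+: copies the source database into mem
        disk.close()
        return mem

Each process can then read and write its private copy freely; anything that must survive (like updated export records) has to be written back or merged by the parent afterward.
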
@@ -1,6 +1,10 @@
 """ Test path_utils.py """


 def test_sanitize_filename():
+    """test sanitize_filename"""

+    # subtract 6 chars from max length of 255 to account for lock file extension
     from osxphotos.path_utils import sanitize_filename
     from osxphotos._constants import MAX_FILENAME_LEN

@@ -30,25 +34,25 @@ def test_sanitize_filename():
     filename = "foo" + "x" * 512
     new_filename = sanitize_filename(filename)
     assert len(new_filename) == MAX_FILENAME_LEN
-    assert new_filename == "foo" + "x" * 252
+    assert new_filename == "foo" + "x" * (252 - 6)

     filename = "x" * 512 + ".jpeg"
     new_filename = sanitize_filename(filename)
     assert len(new_filename) == MAX_FILENAME_LEN
-    assert new_filename == "x" * 250 + ".jpeg"
+    assert new_filename == "x" * (250 - 6) + ".jpeg"

     filename = "foo.bar" + "x" * 255 + ".foo.bar.jpeg"
     new_filename = sanitize_filename(filename)
     assert len(new_filename) == MAX_FILENAME_LEN
-    assert new_filename == "foo.bar" + "x" * 243 + ".jpeg"
+    assert new_filename == "foo.bar" + "x" * (243 - 6) + ".jpeg"

     filename = "foo." + "x" * 256
     new_filename = sanitize_filename(filename)
     assert len(new_filename) == MAX_FILENAME_LEN
-    assert new_filename == "foo." + "x" * 251
+    assert new_filename == "foo." + "x" * (251 - 6)


 def test_sanitize_dirname():

@@ -83,6 +87,7 @@ def test_sanitize_dirname():
     assert len(new_dirname) == MAX_DIRNAME_LEN
     assert new_dirname == "foo" + "x" * 252


 def test_sanitize_pathpart():
     from osxphotos.path_utils import sanitize_pathpart
     from osxphotos._constants import MAX_DIRNAME_LEN

@@ -114,4 +119,3 @@ def test_sanitize_pathpart():
     new_dirname = sanitize_pathpart(dirname)
     assert len(new_dirname) == MAX_DIRNAME_LEN
     assert new_dirname == "foo" + "x" * 252

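The "(252 - 6)" style adjustments follow directly from the comment added at the top of the file: the branch reserves 6 of the 255 characters in a filename component for the lock file's extra characters (a leading "." plus the ".lock" suffix is 6 characters, and the lock is ".<name>.lock"). Since the unchanged len(new_filename) == MAX_FILENAME_LEN assertions still pass, the branch presumably lowers MAX_FILENAME_LEN itself by 6; a sketch of the arithmetic under that assumption (my illustration, not repository code):

    LOCK_OVERHEAD = len(".") + len(".lock")      # leading dot + ".lock" suffix = 6
    MAX_FILENAME_LEN = 255 - LOCK_OVERHEAD       # = 249 under this assumption

    name = "foo" + "x" * 512                     # over-long stem from the first case
    truncated = name[:MAX_FILENAME_LEN]
    assert len(truncated) == MAX_FILENAME_LEN
    assert truncated == "foo" + "x" * (252 - 6)  # 3 + 246 == 249 characters
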