Compare commits
214 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f67f239278 | ||
|
|
441de711dc | ||
|
|
1450b3ccac | ||
|
|
c06c230a46 | ||
|
|
b1171e96cc | ||
|
|
8c4fe40aa6 | ||
|
|
f416418546 | ||
|
|
8e9691d6d7 | ||
|
|
cafa483cfc | ||
|
|
11d368a69c | ||
|
|
bd9d5a26f3 | ||
|
|
ec19636cbf | ||
|
|
48e9c32add | ||
|
|
b455bd4c4a | ||
|
|
d125854f2a | ||
|
|
e228cfab74 | ||
|
|
85760dc4fe | ||
|
|
be07f90e5a | ||
|
|
a80dee401c | ||
|
|
e67fce2871 | ||
|
|
53304d7023 | ||
|
|
d1af14dbb4 | ||
|
|
ca8f2b8d5c | ||
|
|
e3e5a20681 | ||
|
|
98b3f63a92 | ||
|
|
c8128203f4 | ||
|
|
c061161605 | ||
|
|
397db0d72f | ||
|
|
605d63aa4f | ||
|
|
ac9b6d52a3 | ||
|
|
57315d4449 | ||
|
|
49cfd0b93e | ||
|
|
b15f744aab | ||
|
|
cc8b10e792 | ||
|
|
93835130c2 | ||
|
|
df13683d11 | ||
|
|
b0ec6c6b36 | ||
|
|
cb124713d6 | ||
|
|
97d3c69ade | ||
|
|
a8622b6b90 | ||
|
|
cceab62993 | ||
|
|
69356c0b57 | ||
|
|
5eb0876e33 | ||
|
|
7444b6d173 | ||
|
|
180450c1e7 | ||
|
|
00e16611fc | ||
|
|
65674f57bc | ||
|
|
7af1ccd4ed | ||
|
|
1b6f661e6b | ||
|
|
a57da2346b | ||
|
|
3fe03cd127 | ||
|
|
5cc98c338b | ||
|
|
1c9d4f282b | ||
|
|
1ceda15134 | ||
|
|
a80071111f | ||
|
|
072a8d795e | ||
|
|
b35b071634 | ||
|
|
56a000609f | ||
|
|
54d5d4b7ba | ||
|
|
38137a1351 | ||
|
|
4b29a2e05f | ||
|
|
9be0f849b7 | ||
|
|
ccb5f252d1 | ||
|
|
d8a64c9573 | ||
|
|
81d4e392c3 | ||
|
|
85d2baac10 | ||
|
|
8a768e62ce | ||
|
|
1c8eb764f5 | ||
|
|
8e4b88ad1f | ||
|
|
3f80f786a3 | ||
|
|
a337e79e13 | ||
|
|
ec68feec49 | ||
|
|
9b9b54e590 | ||
|
|
22f1e8f2a6 | ||
|
|
1867c1d747 | ||
|
|
87eb84fddd | ||
|
|
15a3736b74 | ||
|
|
cf28cb6452 | ||
|
|
f20fadcef7 | ||
|
|
3bac106eb7 | ||
|
|
47d1c82c03 | ||
|
|
6f281711e2 | ||
|
|
4b30b3b426 | ||
|
|
1fa9583ea6 | ||
|
|
235e1fb1a6 | ||
|
|
36c2821a0f | ||
|
|
ed425724a0 | ||
|
|
55daa31c71 | ||
|
|
b6ac9e1ea3 | ||
|
|
9d151478d6 | ||
|
|
7d55844390 | ||
|
|
f398e9116f | ||
|
|
4fe8190b57 | ||
|
|
7e42ebb240 | ||
|
|
edae116baa | ||
|
|
d542cda17d | ||
|
|
99b5b54c6d | ||
|
|
379feddcda | ||
|
|
24285f5dd2 | ||
|
|
3cb3ebb300 | ||
|
|
16037f10fa | ||
|
|
ebd21491ac | ||
|
|
b7c7b9f066 | ||
|
|
21e7020fec | ||
|
|
952741d488 | ||
|
|
9fef12ed37 | ||
|
|
97362fc0f1 | ||
|
|
e09f0b40f1 | ||
|
|
b749681c6d | ||
|
|
8544667c72 | ||
|
|
d6a22b765a | ||
|
|
96365728c2 | ||
|
|
c01f713f00 | ||
|
|
1aa3838c38 | ||
|
|
cde56e9d13 | ||
|
|
1c9da5ed6f | ||
|
|
d74f7f499b | ||
|
|
c85bb02304 | ||
|
|
3e5062684a | ||
|
|
626e460aab | ||
|
|
1820715849 | ||
|
|
a6ca3f453c | ||
|
|
ddaa66d19e | ||
|
|
6073acc9d3 | ||
|
|
bae0283441 | ||
|
|
507c4a3740 | ||
|
|
6a898886dd | ||
|
|
01cd7fed6d | ||
|
|
e8273c9752 | ||
|
|
fd5e748dca | ||
|
|
c02953ef5f | ||
|
|
daea30f162 | ||
|
|
be2e16769d | ||
|
|
b0456dc8e6 | ||
|
|
c8bd8ea2f3 | ||
|
|
67a9a9e21b | ||
|
|
427c4c0bc4 | ||
|
|
f0d200435a | ||
|
|
49de3ecd2e | ||
|
|
c06dd4233f | ||
|
|
fd638427d0 | ||
|
|
6fb8fe8142 | ||
|
|
69cc6ce680 | ||
|
|
dfc31ff15f | ||
|
|
707544752e | ||
|
|
564a5073f1 | ||
|
|
d769dde358 | ||
|
|
d066435e3d | ||
|
|
8f0307fc24 | ||
|
|
908fead8a2 | ||
|
|
072e894e56 | ||
|
|
47e57ee98e | ||
|
|
e90d9c6e11 | ||
|
|
2feb0999b3 | ||
|
|
d26ea0dccc | ||
|
|
aeae1e0b8a | ||
|
|
128a35d6c0 | ||
|
|
57d9163090 | ||
|
|
a236ed42c1 | ||
|
|
ad58b03f2d | ||
|
|
066215621d | ||
|
|
7f0558e08b | ||
|
|
4441d071b3 | ||
|
|
9da7ad6dcc | ||
|
|
91f71df07d | ||
|
|
9e314bfaf4 | ||
|
|
0948b24821 | ||
|
|
4b951826db | ||
|
|
cda5f44693 | ||
|
|
960487f296 | ||
|
|
6ab1511b4f | ||
|
|
b8da9765b8 | ||
|
|
21547a8eaa | ||
|
|
23b26ed130 | ||
|
|
92e5bdd2e9 | ||
|
|
a723881dd3 | ||
|
|
b338b34d50 | ||
|
|
816b98e617 | ||
|
|
39ffef502c | ||
|
|
1e08a7449e | ||
|
|
0940f039d3 | ||
|
|
c11afbaa6e | ||
|
|
940fc33f11 | ||
|
|
8542e1a97f | ||
|
|
dd20b8d8ac | ||
|
|
765a3d27c5 | ||
|
|
b68f4c2b8b | ||
|
|
cc9220e076 | ||
|
|
e99391a68e | ||
|
|
783e097da3 | ||
|
|
279ab36929 | ||
|
|
1f13ba837f | ||
|
|
dc87194eec | ||
|
|
d32774f495 | ||
|
|
7da02991cf | ||
|
|
6f413c64d7 | ||
|
|
2d7d0b86e0 | ||
|
|
acb6b9e72f | ||
|
|
f1ade92e98 | ||
|
|
a27ce33473 | ||
|
|
2b7d84a4d1 | ||
|
|
92b405a166 | ||
|
|
15d7ad538d | ||
|
|
1f8fd6e929 | ||
|
|
08a9793651 | ||
|
|
2c8fc9789f | ||
|
|
dbededcd0e | ||
|
|
ef799610ae | ||
|
|
8dea41961b | ||
|
|
5799afbdc1 | ||
|
|
9a0fc0db3e | ||
|
|
549170fa36 | ||
|
|
dede640ef3 | ||
|
|
2b3491bdc4 |
7
.github/workflows/pythonpackage.yml
vendored
7
.github/workflows/pythonpackage.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
strategy:
|
||||
max-parallel: 4
|
||||
matrix:
|
||||
python-version: [3.6, 3.7]
|
||||
python-version: [3.8]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
@@ -21,8 +21,8 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Lint with flake8
|
||||
run: |
|
||||
# - name: Lint with flake8
|
||||
# run: |
|
||||
# pip install flake8
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
# flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
||||
@@ -31,4 +31,5 @@ jobs:
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
pip install pytest
|
||||
pip install pytest-mock
|
||||
python -m pytest tests/
|
||||
|
||||
299
CHANGELOG.md
299
CHANGELOG.md
@@ -4,6 +4,299 @@ All notable changes to this project will be documented in this file. Dates are d
|
||||
|
||||
Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
|
||||
|
||||
#### [v0.29.0](https://github.com/RhetTbull/osxphotos/compare/v0.28.19...v0.29.0)
|
||||
|
||||
> 23 May 2020
|
||||
|
||||
- Made --exiftool and --export-as-hardlink incompatible in CLI to fix #132 [`#132`](https://github.com/RhetTbull/osxphotos/issues/132)
|
||||
- Added --update to CLI export; reference issue #100 [`b1171e9`](https://github.com/RhetTbull/osxphotos/commit/b1171e96cc06362555725995bb311317eb163e49)
|
||||
- Added as_dict to PlaceInfo [`8c4fe40`](https://github.com/RhetTbull/osxphotos/commit/8c4fe40aa6850f166e526cffaa088550884399af)
|
||||
- Updated README.md [`11d368a`](https://github.com/RhetTbull/osxphotos/commit/11d368a69cbe67e909e64b020f0334fc09dd3ac4)
|
||||
- Updated CHANGELOG.md [`cafa483`](https://github.com/RhetTbull/osxphotos/commit/cafa483cfc228c651a03d3361d6d48a35deab1e8)
|
||||
- version bump [`c06c230`](https://github.com/RhetTbull/osxphotos/commit/c06c230a469754691d11fff1034fb02daeeba649)
|
||||
|
||||
#### [v0.28.19](https://github.com/RhetTbull/osxphotos/compare/v0.28.18...v0.28.19)
|
||||
|
||||
> 15 May 2020
|
||||
|
||||
- Added label and label_normalized to template system, closes #130 [`#130`](https://github.com/RhetTbull/osxphotos/issues/130)
|
||||
- Revert "test library updates" [`48e9c32`](https://github.com/RhetTbull/osxphotos/commit/48e9c32add549e66c3ef8c65f8821f5033b55b11)
|
||||
- test library updates [`d125854`](https://github.com/RhetTbull/osxphotos/commit/d125854f2a04e37747af3e0796370a565c1c9bd0)
|
||||
- Updated CHANGELOG.md [`e228cfa`](https://github.com/RhetTbull/osxphotos/commit/e228cfab746055c8d6df428aebe0ed001fb6d4d0)
|
||||
- version bump [`bd9d5a2`](https://github.com/RhetTbull/osxphotos/commit/bd9d5a26f3bfcbb33896a139fa86cdab46768103)
|
||||
- Update README.md [`85760dc`](https://github.com/RhetTbull/osxphotos/commit/85760dc4fe2274d826ed80494fd4e66866398609)
|
||||
|
||||
#### [v0.28.18](https://github.com/RhetTbull/osxphotos/compare/v0.28.17...v0.28.18)
|
||||
|
||||
> 14 May 2020
|
||||
|
||||
- Implemented PhotoInfo.exiftool [`a80dee4`](https://github.com/RhetTbull/osxphotos/commit/a80dee401c7eb959f6ad6d93a3272657ed28f521)
|
||||
- Updated CHANGELOG.md [`e67fce2`](https://github.com/RhetTbull/osxphotos/commit/e67fce28714cf4065b64202bb3b149ba5bec5be4)
|
||||
|
||||
#### [v0.28.17](https://github.com/RhetTbull/osxphotos/compare/v0.28.15...v0.28.17)
|
||||
|
||||
> 14 May 2020
|
||||
|
||||
- Added ExifInfo (Photos 5 only) [`53304d7`](https://github.com/RhetTbull/osxphotos/commit/53304d702317d007056c1d12064503c3ec4ae6f6)
|
||||
- Added as_dict to ExifTool [`d1af14d`](https://github.com/RhetTbull/osxphotos/commit/d1af14dbb4d441a62d352123774e51fa3538db97)
|
||||
|
||||
#### [v0.28.15](https://github.com/RhetTbull/osxphotos/compare/v0.28.13...v0.28.15)
|
||||
|
||||
> 11 May 2020
|
||||
|
||||
- fixed some minor findings... [`#127`](https://github.com/RhetTbull/osxphotos/pull/127)
|
||||
- added --export-as-hardlink option [`#126`](https://github.com/RhetTbull/osxphotos/pull/126)
|
||||
- Added test for folder_names on 10.15.4, closes #119 [`#119`](https://github.com/RhetTbull/osxphotos/issues/119)
|
||||
- Refactored photosdb and photoinfo to add SearchInfo and labels [`98b3f63`](https://github.com/RhetTbull/osxphotos/commit/98b3f63a92aa2105f8fa97af992fc6fe2d78b973)
|
||||
- Added additional test for --export-as-hardlink [`57315d4`](https://github.com/RhetTbull/osxphotos/commit/57315d44497fde977956f76f667470208f11aa2d)
|
||||
- added CHANGELOG.md [`00e1661`](https://github.com/RhetTbull/osxphotos/commit/00e16611fc86c05fb090d036084db9eb42444071)
|
||||
- Updated a couple of tests to use pytest-mock [`397db0d`](https://github.com/RhetTbull/osxphotos/commit/397db0d72fb218669a9ecbff134fa9b392a14661)
|
||||
- added test for export using hardlinks, fixed a test that failed if users locale settings were different to en_US [`b0ec6c6`](https://github.com/RhetTbull/osxphotos/commit/b0ec6c6b36d8cfe05723d47b210d9d7c5aabdfe5)
|
||||
|
||||
#### [v0.28.13](https://github.com/RhetTbull/osxphotos/compare/v0.28.10...v0.28.13)
|
||||
|
||||
> 2 May 2020
|
||||
|
||||
- added --keyword-template [`65674f5`](https://github.com/RhetTbull/osxphotos/commit/65674f57bc174c078e6c47f12ba3aaba87bfa3a4)
|
||||
- Fixed bug related to issue #119 [`7af1ccd`](https://github.com/RhetTbull/osxphotos/commit/7af1ccd4ed22ea7f0f86973bfba7f108b6650291)
|
||||
- test library updates [`1b6f661`](https://github.com/RhetTbull/osxphotos/commit/1b6f661e6b59c003d3b8cb35226ffb51469be508)
|
||||
|
||||
#### [v0.28.10](https://github.com/RhetTbull/osxphotos/compare/v0.28.8...v0.28.10)
|
||||
|
||||
> 29 April 2020
|
||||
|
||||
- Bug fix for albums in Photos <= 4 to address issue #116 [`a57da23`](https://github.com/RhetTbull/osxphotos/commit/a57da2346b282d731ed41db600bfc5cbeb1a0992)
|
||||
- version bump for pypi [`3fe03cd`](https://github.com/RhetTbull/osxphotos/commit/3fe03cd12752c2a7769007b6d934f1efe9f9c4d2)
|
||||
|
||||
#### [v0.28.8](https://github.com/RhetTbull/osxphotos/compare/v0.28.7...v0.28.8)
|
||||
|
||||
> 28 April 2020
|
||||
|
||||
- Fixed implementation of use_albums_as_keywords and use_persons_as_keywords, closes #115 [`#115`](https://github.com/RhetTbull/osxphotos/issues/115)
|
||||
- Updated CHANGELOG.md [`072a8d7`](https://github.com/RhetTbull/osxphotos/commit/072a8d795e5e15fa8ca8d8872aecf4cddd7837f7)
|
||||
- Update README.md [`5cc98c3`](https://github.com/RhetTbull/osxphotos/commit/5cc98c338bcc19fd05bf293eb3afe24c07c8b380)
|
||||
- Updated README.md [`a800711`](https://github.com/RhetTbull/osxphotos/commit/a80071111f810a1d7d6e2d735839e85499091ea4)
|
||||
- Update README.md [`1c9d4f2`](https://github.com/RhetTbull/osxphotos/commit/1c9d4f282beea2ac12273c8d0f9453bad1255c2c)
|
||||
|
||||
#### [v0.28.7](https://github.com/RhetTbull/osxphotos/compare/v0.28.6...v0.28.7)
|
||||
|
||||
> 28 April 2020
|
||||
|
||||
- Added --album-keyword and --person-keyword to CLI, closes #61 [`#61`](https://github.com/RhetTbull/osxphotos/issues/61)
|
||||
- Updated test libraries [`54d5d4b`](https://github.com/RhetTbull/osxphotos/commit/54d5d4b7ba99204f58e723231309ab6e306be28c)
|
||||
- Updated CHANGELOG.md [`38137a1`](https://github.com/RhetTbull/osxphotos/commit/38137a1351cdb7ab72393ea03828933dac0b76b0)
|
||||
- Updated tests/README.md [`56a0006`](https://github.com/RhetTbull/osxphotos/commit/56a000609f2f08d0f8800fec49cada2980c3bb9d)
|
||||
|
||||
#### [v0.28.6](https://github.com/RhetTbull/osxphotos/compare/v0.28.5...v0.28.6)
|
||||
|
||||
> 26 April 2020
|
||||
|
||||
- Fixed locale bug in templates, closes #113 [`#113`](https://github.com/RhetTbull/osxphotos/issues/113)
|
||||
- Updated CHANGELOG.md [`81d4e39`](https://github.com/RhetTbull/osxphotos/commit/81d4e392c39f0fe6f967a447c7d0c970bf224032)
|
||||
- Updated test to avoid issue with GitHub workflow [`9be0f84`](https://github.com/RhetTbull/osxphotos/commit/9be0f849b73061d053d30274ff3295b79c88f0b6)
|
||||
- Update pythonpackage.yml to remove older pythons [`ccb5f25`](https://github.com/RhetTbull/osxphotos/commit/ccb5f252d14e9335ae04a2e338a6d527b80c9a93)
|
||||
|
||||
#### [v0.28.5](https://github.com/RhetTbull/osxphotos/compare/0.28.2...v0.28.5)
|
||||
|
||||
> 21 April 2020
|
||||
|
||||
- added __len__ to PhotosDB, closes #44 [`#44`](https://github.com/RhetTbull/osxphotos/issues/44)
|
||||
- Updated use of _PHOTOS_4_VERSION, closes #106 [`#106`](https://github.com/RhetTbull/osxphotos/issues/106)
|
||||
- Updated tests and test library with RAW images [`9b9b54e`](https://github.com/RhetTbull/osxphotos/commit/9b9b54e590e43ae49fb3ae41d493a1f8faec4181)
|
||||
- Updated setup.py to resolve issue with bpylist2 on python < 3.8 [`8e4b88a`](https://github.com/RhetTbull/osxphotos/commit/8e4b88ad1fc18438f941e045bfc8aeac878914f9)
|
||||
- Added cli.py for use with pyinstaller [`cf28cb6`](https://github.com/RhetTbull/osxphotos/commit/cf28cb6452de17f2ef8d80435386e8d5a1aabd34)
|
||||
- added raw_is_original handling [`a337e79`](https://github.com/RhetTbull/osxphotos/commit/a337e79e13802b4824c2f088ce9db1c027d6f3c5)
|
||||
- Updated CHANGELOG.md [`22f1e8f`](https://github.com/RhetTbull/osxphotos/commit/22f1e8f2a6478e0576f6bff53e348aad8680ae69)
|
||||
|
||||
#### [0.28.2](https://github.com/RhetTbull/osxphotos/compare/v0.28.1...0.28.2)
|
||||
|
||||
> 18 April 2020
|
||||
|
||||
- Added folder support for Photos <= 4, closes #93 [`#93`](https://github.com/RhetTbull/osxphotos/issues/93)
|
||||
- cleaned up SQL statements in _process_database4 [`6f28171`](https://github.com/RhetTbull/osxphotos/commit/6f281711e2001a63ffad076d7b9835272d5d09da)
|
||||
- Updated CHANGELOG.md [`1fa9583`](https://github.com/RhetTbull/osxphotos/commit/1fa9583ea689d54d2613a064f1ade25bcdfbf043)
|
||||
- Fixed suffix check on export to be case insensitive [`4b30b3b`](https://github.com/RhetTbull/osxphotos/commit/4b30b3b4260e2c7409e18825e5b626efe646db16)
|
||||
- test library update [`3bac106`](https://github.com/RhetTbull/osxphotos/commit/3bac106eb7a180e9e39643a89087d92bf2a437d0)
|
||||
|
||||
#### [v0.28.1](https://github.com/RhetTbull/osxphotos/compare/v0.27.4...v0.28.1)
|
||||
|
||||
> 18 April 2020
|
||||
|
||||
- Initial work on suppport for associated RAW images [`7e42ebb`](https://github.com/RhetTbull/osxphotos/commit/7e42ebb2402d45cd5d20bdd55bddddaa9db4679f)
|
||||
- Initial support for RAW photos in Photos 4 to address issue #101 [`9d15147`](https://github.com/RhetTbull/osxphotos/commit/9d151478d610291b8d482aafae3d445dfd391fca)
|
||||
- replaced CLI option --original-name with --current-name [`36c2821`](https://github.com/RhetTbull/osxphotos/commit/36c2821a0fa62eaaa54cf1edc2d9c6da98155354)
|
||||
|
||||
#### [v0.27.4](https://github.com/RhetTbull/osxphotos/compare/v0.27.3...v0.27.4)
|
||||
|
||||
> 12 April 2020
|
||||
|
||||
- Added {folder_album} to template and --folder to CLI [`b7c7b9f`](https://github.com/RhetTbull/osxphotos/commit/b7c7b9f0664e69c743bdd8a228ad2936cf6b7600)
|
||||
- Test library update [`21e7020`](https://github.com/RhetTbull/osxphotos/commit/21e7020fec406b0f3926d7adc8a1451bfe77e75a)
|
||||
- Updated CHANGELOG.md [`952741d`](https://github.com/RhetTbull/osxphotos/commit/952741d488d2fbbaf8a0c1d3781ad7c4205c068f)
|
||||
|
||||
#### [v0.27.3](https://github.com/RhetTbull/osxphotos/compare/v0.27.1...v0.27.3)
|
||||
|
||||
> 12 April 2020
|
||||
|
||||
- Added additional tests for album_info [`97362fc`](https://github.com/RhetTbull/osxphotos/commit/97362fc0f13b2867abc013f4ba97ae60b0700894)
|
||||
- Fixed bug with handling of deleted albums [`9fef12e`](https://github.com/RhetTbull/osxphotos/commit/9fef12ed37634a7bdb11232976b4b2ddccd1a7cb)
|
||||
|
||||
#### [v0.27.1](https://github.com/RhetTbull/osxphotos/compare/v0.27.0...v0.27.1)
|
||||
|
||||
> 12 April 2020
|
||||
|
||||
- Changed AlbumInfo and FolderInfo interface to maintain backwards compatibility with PhotosDB.albums [`e09f0b4`](https://github.com/RhetTbull/osxphotos/commit/e09f0b40f1671d70ee399cdc519492b04fac8adc)
|
||||
- Updated CHANGELOG.md [`b749681`](https://github.com/RhetTbull/osxphotos/commit/b749681c6d2545eacf653ab1b2a5d1384e3123eb)
|
||||
|
||||
#### [v0.27.0](https://github.com/RhetTbull/osxphotos/compare/v0.26.1...v0.27.0)
|
||||
|
||||
> 11 April 2020
|
||||
|
||||
- Update README.md [`#95`](https://github.com/RhetTbull/osxphotos/pull/95)
|
||||
- Added tests and README for AlbumInfo and FolderInfo [`d6a22b7`](https://github.com/RhetTbull/osxphotos/commit/d6a22b765ab17f6ef1ba8c50b77946f090979968)
|
||||
- Added albuminfo.py for AlbumInfo and FolderInfo classes [`9636572`](https://github.com/RhetTbull/osxphotos/commit/96365728c2ff42abfb6828872ffac53b4c3c8024)
|
||||
- Updated CHANGELOG.md [`cde56e9`](https://github.com/RhetTbull/osxphotos/commit/cde56e9d13baf3098ec85839cf1aaa33b4915ac9)
|
||||
- Update README.md TOC [`8544667`](https://github.com/RhetTbull/osxphotos/commit/8544667c729ea0d7fe39671d909e09cda519e250)
|
||||
|
||||
#### [v0.26.1](https://github.com/RhetTbull/osxphotos/compare/v0.26.0...v0.26.1)
|
||||
|
||||
> 11 April 2020
|
||||
|
||||
- Bug fix for PhotosDB.photos() query [`1c9da5e`](https://github.com/RhetTbull/osxphotos/commit/1c9da5ed6ffa21f0577906b65b7da08951725d1f)
|
||||
- Updated test library [`d74f7f4`](https://github.com/RhetTbull/osxphotos/commit/d74f7f499bf59f37ec81cfa9d49cbbf3aafb5961)
|
||||
- Updated CHANGELOG.md [`c85bb02`](https://github.com/RhetTbull/osxphotos/commit/c85bb023042e072d6688060eb259156c2fa579b9)
|
||||
|
||||
#### [v0.26.0](https://github.com/RhetTbull/osxphotos/compare/v0.25.1...v0.26.0)
|
||||
|
||||
> 11 April 2020
|
||||
|
||||
- Added test for 10.15.4 [`1820715`](https://github.com/RhetTbull/osxphotos/commit/182071584904d001a9b199eef5febfb79e00696e)
|
||||
- Changed PhotosDB albums interface as prep for adding folders [`3e50626`](https://github.com/RhetTbull/osxphotos/commit/3e5062684ab6d706d91d4abeb4e3b0ca47867b70)
|
||||
- Updated CHANGELOG.md [`a6ca3f4`](https://github.com/RhetTbull/osxphotos/commit/a6ca3f453ce0fae4e8d13c7c256ed69a16d2e3f2)
|
||||
|
||||
#### [v0.25.1](https://github.com/RhetTbull/osxphotos/compare/v0.25.0...v0.25.1)
|
||||
|
||||
> 5 April 2020
|
||||
|
||||
- Added --no-extended-attributes option to CLI, closes #85 [`#85`](https://github.com/RhetTbull/osxphotos/issues/85)
|
||||
- Fixed CLI help for invalid topic, closes #76 [`#76`](https://github.com/RhetTbull/osxphotos/issues/76)
|
||||
- Updated test library [`bae0283`](https://github.com/RhetTbull/osxphotos/commit/bae0283441f04d71aa78dbd1cf014f376ef1f91a)
|
||||
|
||||
#### [v0.25.0](https://github.com/RhetTbull/osxphotos/compare/v0.24.2...v0.25.0)
|
||||
|
||||
> 4 April 2020
|
||||
|
||||
- Added places, --place, --no-place to CLI, closes #87, #88 [`#87`](https://github.com/RhetTbull/osxphotos/issues/87)
|
||||
- Updated render_filepath_template to support multiple values [`6a89888`](https://github.com/RhetTbull/osxphotos/commit/6a898886ddadc9d5bc9dbad6ee7365270dd0a26d)
|
||||
- Added {album}, {keyword}, and {person} to template system [`507c4a3`](https://github.com/RhetTbull/osxphotos/commit/507c4a374014f999ca19789bce0df0c14332e021)
|
||||
- Added places command to CLI [`fd5e748`](https://github.com/RhetTbull/osxphotos/commit/fd5e748dca759ea1c3a7329d447f363afe8418b7)
|
||||
- Updated export example [`01cd7fe`](https://github.com/RhetTbull/osxphotos/commit/01cd7fed6d7fc0c61c171a05319c211eb0a9f7c1)
|
||||
- Updated CHANGELOG.md [`daea30f`](https://github.com/RhetTbull/osxphotos/commit/daea30f1626a208209ab6854cbd3b12f4b0a3405)
|
||||
|
||||
#### [v0.24.2](https://github.com/RhetTbull/osxphotos/compare/v0.24.1...v0.24.2)
|
||||
|
||||
> 28 March 2020
|
||||
|
||||
- added {place.country_code} to template system [`be2e167`](https://github.com/RhetTbull/osxphotos/commit/be2e16769d5d2c75af6d7792f1311f5a65c3bc67)
|
||||
|
||||
#### [v0.24.1](https://github.com/RhetTbull/osxphotos/compare/v0.23.4...v0.24.1)
|
||||
|
||||
> 28 March 2020
|
||||
|
||||
- Added detailed place data in PlaceInfo.names [`c06dd42`](https://github.com/RhetTbull/osxphotos/commit/c06dd4233f917f068c087f5604013d371b0a826a)
|
||||
- Template system now supports default values [`67a9a9e`](https://github.com/RhetTbull/osxphotos/commit/67a9a9e21bd05d01a3202b0a1279487f5d04c9d9)
|
||||
- Replaced template renderer with regex-based renderer [`427c4c0`](https://github.com/RhetTbull/osxphotos/commit/427c4c0bc49f671477866d30eee74834c67d7bc5)
|
||||
|
||||
#### [v0.23.4](https://github.com/RhetTbull/osxphotos/compare/v0.23.3...v0.23.4)
|
||||
|
||||
> 22 March 2020
|
||||
|
||||
- Added export_by_album.py to examples [`908fead`](https://github.com/RhetTbull/osxphotos/commit/908fead8a2fbcef3b4a387f34d83d88c507c5939)
|
||||
- Updated CHANGELOG.md [`072e894`](https://github.com/RhetTbull/osxphotos/commit/072e894e56c4dfe5522d073b202933fed0204ef5)
|
||||
- Updated pathvalidate calls [`d066435`](https://github.com/RhetTbull/osxphotos/commit/d066435e3df4062be6a0a3d5fa7308f293e764d5)
|
||||
|
||||
#### [v0.23.3](https://github.com/RhetTbull/osxphotos/compare/v0.23.1...v0.23.3)
|
||||
|
||||
> 22 March 2020
|
||||
|
||||
- Initial version of templating system for CLI [`2feb099`](https://github.com/RhetTbull/osxphotos/commit/2feb0999b3f9ffd9a24e37238f780239a027aa49)
|
||||
- Added __str__ to place [`ad58b03`](https://github.com/RhetTbull/osxphotos/commit/ad58b03f2d31daf33849b141570dd0fb5e0a262e)
|
||||
- Test library updates [`e90d9c6`](https://github.com/RhetTbull/osxphotos/commit/e90d9c6e11fce7a4e4aa348dcc5f57420c0b6c44)
|
||||
|
||||
#### [v0.23.1](https://github.com/RhetTbull/osxphotos/compare/v0.23.0...v0.23.1)
|
||||
|
||||
> 21 March 2020
|
||||
|
||||
- Fixed requirements.txt for bplist2 [`cda5f44`](https://github.com/RhetTbull/osxphotos/commit/cda5f446933ea2272409d1f153e2a7811626ada6)
|
||||
- Updated CHANGELOG.md [`b8da976`](https://github.com/RhetTbull/osxphotos/commit/b8da9765b8949eb90852d249c2877eeb1806d987)
|
||||
- Updated requirements.txt [`9da7ad6`](https://github.com/RhetTbull/osxphotos/commit/9da7ad6dcc021fdafe358d74e1c52f69dc49ade8)
|
||||
|
||||
#### [v0.23.0](https://github.com/RhetTbull/osxphotos/compare/v0.22.23...v0.23.0)
|
||||
|
||||
> 21 March 2020
|
||||
|
||||
- Added PhotoInfo.place for reverse geolocation data [`b338b34`](https://github.com/RhetTbull/osxphotos/commit/b338b34d5055a7621e4ebe4fbbae12227d77af6d)
|
||||
- Updated CHANGELOG.md [`816b98e`](https://github.com/RhetTbull/osxphotos/commit/816b98e617c30d0bdb51bc2413f9915742c8592e)
|
||||
- Update pythonpackage.yml [`92e5bdd`](https://github.com/RhetTbull/osxphotos/commit/92e5bdd2e986e5de2a710abf60ba0dc99c6a6730)
|
||||
|
||||
#### [v0.22.23](https://github.com/RhetTbull/osxphotos/compare/v0.22.21...v0.22.23)
|
||||
|
||||
> 15 March 2020
|
||||
|
||||
- Lots of work on export code [`0940f03`](https://github.com/RhetTbull/osxphotos/commit/0940f039d3e628dc4f25c69bf27ce413807d3f71)
|
||||
- test library update [`1e08a74`](https://github.com/RhetTbull/osxphotos/commit/1e08a7449e69965a37373dadabb37c993d93fc69)
|
||||
|
||||
#### [v0.22.21](https://github.com/RhetTbull/osxphotos/compare/v0.22.17...v0.22.21)
|
||||
|
||||
> 15 March 2020
|
||||
|
||||
- Working on export edited bug for issue #78 [`8542e1a`](https://github.com/RhetTbull/osxphotos/commit/8542e1a97f6b640f287b37af9e50fd05f964ec4d)
|
||||
- Fixed download-missing to only download when actually missing [`dd20b8d`](https://github.com/RhetTbull/osxphotos/commit/dd20b8d8ac3b16d3b72a26b97dcc620b11e3a7c0)
|
||||
- Updated CHANGELOG.md [`cc9220e`](https://github.com/RhetTbull/osxphotos/commit/cc9220e0763816d784f2fd8377dfe14a99981622)
|
||||
|
||||
#### [v0.22.17](https://github.com/RhetTbull/osxphotos/compare/v0.22.16...v0.22.17)
|
||||
|
||||
> 14 March 2020
|
||||
|
||||
- Added MANIFEST.in [`279ab36`](https://github.com/RhetTbull/osxphotos/commit/279ab369295cfe1c778b38e212248271e4fc659e)
|
||||
- version bump [`783e097`](https://github.com/RhetTbull/osxphotos/commit/783e097da35a210a2aa5c75865a8599541b9da0b)
|
||||
|
||||
#### [v0.22.16](https://github.com/RhetTbull/osxphotos/compare/v0.22.13...v0.22.16)
|
||||
|
||||
> 14 March 2020
|
||||
|
||||
- removed activate from --download-missing-photos Applescript, closes #69 [`#69`](https://github.com/RhetTbull/osxphotos/issues/69)
|
||||
- Added media type specials to json and string output, closes #68 [`#68`](https://github.com/RhetTbull/osxphotos/issues/68)
|
||||
- Added query/export options for special media types [`2b7d84a`](https://github.com/RhetTbull/osxphotos/commit/2b7d84a4d103982ad874d875bafbc34d654d539a)
|
||||
- README.md update [`a27ce33`](https://github.com/RhetTbull/osxphotos/commit/a27ce33473df3260dfb7ed26e28295cbf87d1e78)
|
||||
- Test library updates [`2d7d0b8`](https://github.com/RhetTbull/osxphotos/commit/2d7d0b86e0008cae043e314937504f36ad882990)
|
||||
- Fixed bug in --download-missing related to burst images [`1f13ba8`](https://github.com/RhetTbull/osxphotos/commit/1f13ba837fe36ff4eeb48cca02f5312a88a0a765)
|
||||
- test library update [`acb6b9e`](https://github.com/RhetTbull/osxphotos/commit/acb6b9e72f7f6b8f4f1d64b46f270a4d3e984fef)
|
||||
|
||||
#### [v0.22.13](https://github.com/RhetTbull/osxphotos/compare/v0.22.12...v0.22.13)
|
||||
|
||||
> 8 March 2020
|
||||
|
||||
- Added media type specials, closes #60 [`#60`](https://github.com/RhetTbull/osxphotos/issues/60)
|
||||
- Updated CHANGELOG.md [`08a9793`](https://github.com/RhetTbull/osxphotos/commit/08a9793651481e1984a4482794ffedd48e4367a2)
|
||||
- Updated README.md [`1f8fd6e`](https://github.com/RhetTbull/osxphotos/commit/1f8fd6e929cc0edd3dd2f222416454d26955bf2a)
|
||||
|
||||
#### [v0.22.12](https://github.com/RhetTbull/osxphotos/compare/0.22.10...v0.22.12)
|
||||
|
||||
> 7 March 2020
|
||||
|
||||
- Added exiftool [`8dea419`](https://github.com/RhetTbull/osxphotos/commit/8dea41961bad285be7058a68e5f7199e5cfb740e)
|
||||
- Added --exiftool to CLI export [`ef79961`](https://github.com/RhetTbull/osxphotos/commit/ef799610aea67b703a7d056b7eee227534ba78a5)
|
||||
- Updated test library [`9a0fc0d`](https://github.com/RhetTbull/osxphotos/commit/9a0fc0db3e79359610fd0f124a97b03fcf97d8a7)
|
||||
|
||||
#### [0.22.10](https://github.com/RhetTbull/osxphotos/compare/v0.22.9...0.22.10)
|
||||
|
||||
> 8 February 2020
|
||||
|
||||
- Fixed bug in --download-missing to fix issue #64 [`c654e3d`](https://github.com/RhetTbull/osxphotos/commit/c654e3dc61283382b37b6892dab1516ec517143a)
|
||||
- removed commented out code [`69addc3`](https://github.com/RhetTbull/osxphotos/commit/69addc34649f992c6a4a0e0e334754a72530f0ba)
|
||||
- Updated CHANGELOG.md [`1e013b6`](https://github.com/RhetTbull/osxphotos/commit/1e013b6802e49e26ec5a94eb702e841b2eb68395)
|
||||
|
||||
#### [v0.22.9](https://github.com/RhetTbull/osxphotos/compare/v0.22.7...v0.22.9)
|
||||
|
||||
> 1 February 2020
|
||||
@@ -94,7 +387,11 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
|
||||
- Moved PhotosDB attributes to properties instead of methods [`d95acdf`](https://github.com/RhetTbull/osxphotos/commit/d95acdf9f8764a1720bcba71a6dad29bf668eaf9)
|
||||
- changed interface for export, prepped for exiftool_json_sidecar [`1fe8859`](https://github.com/RhetTbull/osxphotos/commit/1fe885962e8a9a420e776bdd3dc640ca143224b2)
|
||||
|
||||
#### [v0.15.1](https://github.com/RhetTbull/osxphotos/compare/v0.14.21...v0.15.1)
|
||||
#### [v0.15.1](https://github.com/RhetTbull/osxphotos/compare/v0.15.0...v0.15.1)
|
||||
|
||||
> 23 May 2020
|
||||
|
||||
#### [v0.15.0](https://github.com/RhetTbull/osxphotos/compare/v0.14.21...v0.15.0)
|
||||
|
||||
> 14 December 2019
|
||||
|
||||
|
||||
2
MANIFEST.in
Normal file
2
MANIFEST.in
Normal file
@@ -0,0 +1,2 @@
|
||||
include README.md
|
||||
include osxphotos/templates/*
|
||||
19
cli.py
Normal file
19
cli.py
Normal file
@@ -0,0 +1,19 @@
|
||||
""" stand alone command line script for use with pyinstaller
|
||||
|
||||
To build this into an executable:
|
||||
- install pyinstaller:
|
||||
python3 -m pip install pyinstaller
|
||||
- then use make_cli_exe.sh to run pyinstaller or execute the following command:
|
||||
pyinstaller --onefile --hidden-import="pkg_resources.py2_warn" --name osxphotos --add-data osxphotos/templates/xmp_sidecar.mako:osxphotos/templates cli.py
|
||||
|
||||
Resulting executable will be in "dist/osxphotos"
|
||||
|
||||
Note: This is *not* the cli that "python3 -m pip install osxphotos" or "python setup.py install" would install;
|
||||
it's merely a wrapper around __main__.py to allow pyinstaller to work
|
||||
|
||||
"""
|
||||
|
||||
from osxphotos.__main__ import cli
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
||||
86
examples/export_by_album.py
Normal file
86
examples/export_by_album.py
Normal file
@@ -0,0 +1,86 @@
|
||||
""" Export all photos to specified directory using album names as folders
|
||||
If file has been edited, also export the edited version,
|
||||
otherwise, export the original version
|
||||
This will result in duplicate photos if photo is in more than album """
|
||||
|
||||
import os.path
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
import click
|
||||
from pathvalidate import is_valid_filepath, sanitize_filepath
|
||||
|
||||
import osxphotos
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.argument("export_path", type=click.Path(exists=True))
|
||||
@click.option(
|
||||
"--default-album",
|
||||
help="Default folder for photos with no album. Defaults to 'unfiled'",
|
||||
default="unfiled",
|
||||
)
|
||||
@click.option(
|
||||
"--library-path",
|
||||
help="Path to Photos library, default to last used library",
|
||||
default=None,
|
||||
)
|
||||
@click.option(
|
||||
"--edited",
|
||||
help="Also export edited versions of photos (default is originals only)",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
)
|
||||
def export(export_path, default_album, library_path, edited):
|
||||
""" Export all photos, organized by album """
|
||||
export_path = os.path.expanduser(export_path)
|
||||
library_path = os.path.expanduser(library_path) if library_path else None
|
||||
|
||||
if library_path is not None:
|
||||
photosdb = osxphotos.PhotosDB(library_path)
|
||||
else:
|
||||
photosdb = osxphotos.PhotosDB()
|
||||
|
||||
photos = photosdb.photos()
|
||||
|
||||
for p in photos:
|
||||
if not p.ismissing:
|
||||
albums = p.albums
|
||||
if not albums:
|
||||
albums = [default_album]
|
||||
for album in albums:
|
||||
click.echo(f"exporting {p.original_filename} in album {album}")
|
||||
|
||||
# make sure no invalid characters in destination path (could be in album name)
|
||||
album_name = sanitize_filepath(album, platform="auto")
|
||||
|
||||
# create destination folder, if necessary, based on album name
|
||||
dest_dir = os.path.join(export_path, album_name)
|
||||
|
||||
# verify path is a valid path
|
||||
if not is_valid_filepath(dest_dir, platform="auto"):
|
||||
sys.exit(f"Invalid filepath {dest_dir}")
|
||||
|
||||
# create destination dir if needed
|
||||
if not os.path.isdir(dest_dir):
|
||||
os.makedirs(dest_dir)
|
||||
|
||||
filename = p.original_filename
|
||||
# export the photo but only if --edited, photo has adjustments, and
|
||||
# path_edited is not None (can be None if edited photo is missing)
|
||||
if edited and p.hasadjustments and p.path_edited:
|
||||
# export edited version
|
||||
# use original filename with _edited appended but make sure suffix is
|
||||
# same as edited file
|
||||
edited_filename = f"{pathlib.Path(filename).stem}_edited{pathlib.Path(p.path_edited).suffix}"
|
||||
exported = p.export(dest_dir, edited_filename, edited=True)
|
||||
click.echo(f"Exported {edited_filename} to {exported}")
|
||||
# export unedited version
|
||||
exported = p.export(dest_dir, filename)
|
||||
click.echo(f"Exported {filename} to {exported}")
|
||||
else:
|
||||
click.echo(f"Skipping missing photo: {p.original_filename} in album {album}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
export() # pylint: disable=no-value-for-parameter
|
||||
8
make_cli_exe.sh
Executable file
8
make_cli_exe.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This will build an stand-alone executable called 'osxphotos' in your ./dist directory
|
||||
# using pyinstaller
|
||||
# If you need to install pyinstaller:
|
||||
# python3 -m pip install --upgrade pyinstaller
|
||||
|
||||
pyinstaller --onefile --hidden-import="pkg_resources.py2_warn" --name osxphotos --add-data osxphotos/templates/xmp_sidecar.mako:osxphotos/templates cli.py
|
||||
File diff suppressed because it is too large
Load Diff
@@ -13,8 +13,12 @@ import os.path
|
||||
# TODO: Should this also use compatibleBackToVersion from LiGlobals?
|
||||
_TESTED_DB_VERSIONS = ["6000", "4025", "4016", "3301", "2622"]
|
||||
|
||||
# versions later than this have a different database structure
|
||||
_PHOTOS_5_VERSION = "6000"
|
||||
# only version 3 - 4 have RKVersion.selfPortrait
|
||||
_PHOTOS_3_VERSION = "3301"
|
||||
|
||||
# versions 5.0 and later have a different database structure
|
||||
_PHOTOS_4_VERSION = "4025" # latest Mojove version on 10.14.6
|
||||
_PHOTOS_5_VERSION = "6000" # seems to be current on 10.15.1 through 10.15.4
|
||||
|
||||
# which major version operating systems have been tested
|
||||
_TESTED_OS_VERSIONS = ["12", "13", "14", "15"]
|
||||
@@ -22,6 +26,9 @@ _TESTED_OS_VERSIONS = ["12", "13", "14", "15"]
|
||||
# Photos 5 has persons who are empty string if unidentified face
|
||||
_UNKNOWN_PERSON = "_UNKNOWN_"
|
||||
|
||||
# photos with no reverse geolocation info (place)
|
||||
_UNKNOWN_PLACE = "_UNKNOWN_"
|
||||
|
||||
_EXIF_TOOL_URL = "https://exiftool.org/"
|
||||
|
||||
# Where are shared iCloud photos located?
|
||||
@@ -34,3 +41,25 @@ _MOVIE_TYPE = 1
|
||||
# Name of XMP template file
|
||||
_TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), "templates")
|
||||
_XMP_TEMPLATE_NAME = "xmp_sidecar.mako"
|
||||
|
||||
# Constants used for processing folders and albums
|
||||
_PHOTOS_5_ALBUM_KIND = 2 # normal user album
|
||||
_PHOTOS_5_SHARED_ALBUM_KIND = 1505 # shared album
|
||||
_PHOTOS_5_FOLDER_KIND = 4000 # user folder
|
||||
_PHOTOS_5_ROOT_FOLDER_KIND = 3999 # root folder
|
||||
|
||||
_PHOTOS_4_ALBUM_KIND = 3 # RKAlbum.albumSubclass
|
||||
_PHOTOS_4_TOP_LEVEL_ALBUM = "TopLevelAlbums"
|
||||
_PHOTOS_4_ROOT_FOLDER = "LibraryFolder"
|
||||
|
||||
# EXIF related constants
|
||||
# max keyword length for IPTC:Keyword, reference
|
||||
# https://www.iptc.org/std/photometadata/documentation/userguide/
|
||||
_MAX_IPTC_KEYWORD_LEN = 64
|
||||
|
||||
# Sentinel value for detecting if a template in keyword_template doesn't match
|
||||
# If anyone has a keyword matching this, then too bad...
|
||||
_OSXPHOTOS_NONE_SENTINEL = "OSXPhotosXYZZY42_Sentinel$"
|
||||
|
||||
# SearchInfo categories for Photos 5, corresponds to categories in database/search/psi.sqlite
|
||||
SEARCH_CATEGORY_LABEL = 2024
|
||||
|
||||
439
osxphotos/_export_db.py
Normal file
439
osxphotos/_export_db.py
Normal file
@@ -0,0 +1,439 @@
|
||||
""" Helper class for managing a database used by
|
||||
PhotoInfo.export for tracking state of exports and updates
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import sqlite3
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
from sqlite3 import Error
|
||||
|
||||
from ._version import __version__
|
||||
|
||||
OSXPHOTOS_EXPORTDB_VERSION = "1.0"
|
||||
|
||||
|
||||
class ExportDB_ABC(ABC):
|
||||
@abstractmethod
|
||||
def get_uuid_for_file(self, filename):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_uuid_for_file(self, filename, uuid):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_stat_orig_for_file(self, filename, stats):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_stat_orig_for_file(self, filename):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_stat_exif_for_file(self, filename, stats):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_stat_exif_for_file(self, filename):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_info_for_uuid(self, uuid):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_info_for_uuid(self, uuid, info):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_exifdata_for_file(self, uuid):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_exifdata_for_file(self, uuid, exifdata):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_data(self, file, uuid, orig_stat, exif_stat, info_json, exif_json):
|
||||
pass
|
||||
|
||||
|
||||
class ExportDBNoOp(ExportDB_ABC):
|
||||
""" An ExportDB with NoOp methods """
|
||||
|
||||
def get_uuid_for_file(self, filename):
|
||||
pass
|
||||
|
||||
def set_uuid_for_file(self, filename, uuid):
|
||||
pass
|
||||
|
||||
def set_stat_orig_for_file(self, filename, stats):
|
||||
pass
|
||||
|
||||
def get_stat_orig_for_file(self, filename):
|
||||
pass
|
||||
|
||||
def set_stat_exif_for_file(self, filename, stats):
|
||||
pass
|
||||
|
||||
def get_stat_exif_for_file(self, filename):
|
||||
pass
|
||||
|
||||
def get_info_for_uuid(self, uuid):
|
||||
pass
|
||||
|
||||
def set_info_for_uuid(self, uuid, info):
|
||||
pass
|
||||
|
||||
def get_exifdata_for_file(self, uuid):
|
||||
pass
|
||||
|
||||
def set_exifdata_for_file(self, uuid, exifdata):
|
||||
pass
|
||||
|
||||
def set_data(self, file, uuid, orig_stat, exif_stat, info_json, exif_json):
|
||||
pass
|
||||
|
||||
|
||||
class ExportDB(ExportDB_ABC):
|
||||
""" Interface to sqlite3 database used to store state information for osxphotos export command """
|
||||
|
||||
def __init__(self, dbfile):
|
||||
""" dbfile: path to osxphotos export database file """
|
||||
self._dbfile = dbfile
|
||||
# _path is parent of the database
|
||||
# all files referenced by get_/set_uuid_for_file will be converted to
|
||||
# relative paths to this parent _path
|
||||
# this allows the entire export tree to be moved to a new disk/location
|
||||
# whilst preserving the UUID to filename mappping
|
||||
self._path = pathlib.Path(dbfile).parent
|
||||
self._conn = self._open_export_db(dbfile)
|
||||
self._insert_run_info()
|
||||
|
||||
def get_uuid_for_file(self, filename):
|
||||
""" query database for filename and return UUID
|
||||
returns None if filename not found in database
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
logging.debug(f"get_uuid: {filename}")
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"SELECT uuid FROM files WHERE filepath_normalized = ?", (filename,)
|
||||
)
|
||||
results = c.fetchone()
|
||||
uuid = results[0] if results else None
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
uuid = None
|
||||
|
||||
logging.debug(f"get_uuid: {uuid}")
|
||||
return uuid
|
||||
|
||||
def set_uuid_for_file(self, filename, uuid):
|
||||
""" set UUID of filename to uuid in the database """
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path))
|
||||
filename_normalized = filename.lower()
|
||||
logging.debug(f"set_uuid: {filename} {uuid}")
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"INSERT OR REPLACE INTO files(filepath, filepath_normalized, uuid) VALUES (?, ?, ?);",
|
||||
(filename, filename_normalized, uuid),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def set_stat_orig_for_file(self, filename, stats):
|
||||
""" set stat info for filename
|
||||
filename: filename to set the stat info for
|
||||
stat: a tuple of length 3: mode, size, mtime """
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
logging.debug(f"set_stat_orig_for_file: {filename} {stats}")
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"UPDATE files "
|
||||
+ "SET orig_mode = ?, orig_size = ?, orig_mtime = ? "
|
||||
+ "WHERE filepath_normalized = ?;",
|
||||
(*stats, filename),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def get_stat_orig_for_file(self, filename):
|
||||
""" get stat info for filename
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"SELECT orig_mode, orig_size, orig_mtime FROM files WHERE filepath_normalized = ?",
|
||||
(filename,),
|
||||
)
|
||||
results = c.fetchone()
|
||||
stats = results[0:3] if results else None
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
stats = (None, None, None)
|
||||
|
||||
logging.debug(f"get_stat_orig_for_file: {stats}")
|
||||
return stats
|
||||
|
||||
def set_stat_exif_for_file(self, filename, stats):
|
||||
""" set stat info for filename (after exiftool has updated it)
|
||||
filename: filename to set the stat info for
|
||||
stat: a tuple of length 3: mode, size, mtime """
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
if len(stats) != 3:
|
||||
raise ValueError(f"expected 3 elements for stat, got {len(stats)}")
|
||||
|
||||
logging.debug(f"set_stat_exif_for_file: {filename} {stats}")
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"UPDATE files "
|
||||
+ "SET exif_mode = ?, exif_size = ?, exif_mtime = ? "
|
||||
+ "WHERE filepath_normalized = ?;",
|
||||
(*stats, filename),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def get_stat_exif_for_file(self, filename):
|
||||
""" get stat info for filename (after exiftool has updated it)
|
||||
returns: tuple of (mode, size, mtime)
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"SELECT exif_mode, exif_size, exif_mtime FROM files WHERE filepath_normalized = ?",
|
||||
(filename,),
|
||||
)
|
||||
results = c.fetchone()
|
||||
stats = results[0:3] if results else None
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
stats = (None, None, None)
|
||||
|
||||
logging.debug(f"get_stat_exif_for_file: {stats}")
|
||||
return stats
|
||||
|
||||
def get_info_for_uuid(self, uuid):
|
||||
""" returns the info JSON struct for a UUID """
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute("SELECT json_info FROM info WHERE uuid = ?", (uuid,))
|
||||
results = c.fetchone()
|
||||
info = results[0] if results else None
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
info = None
|
||||
|
||||
logging.debug(f"get_info: {uuid}, {info}")
|
||||
return info
|
||||
|
||||
def set_info_for_uuid(self, uuid, info):
|
||||
""" sets the info JSON struct for a UUID """
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"INSERT OR REPLACE INTO info(uuid, json_info) VALUES (?, ?);",
|
||||
(uuid, info),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
logging.debug(f"set_info: {uuid}, {info}")
|
||||
|
||||
def get_exifdata_for_file(self, filename):
|
||||
""" returns the exifdata JSON struct for a file """
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"SELECT json_exifdata FROM exifdata WHERE filepath_normalized = ?",
|
||||
(filename,),
|
||||
)
|
||||
results = c.fetchone()
|
||||
exifdata = results[0] if results else None
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
exifdata = None
|
||||
|
||||
logging.debug(f"get_exifdata: {filename}, {exifdata}")
|
||||
return exifdata
|
||||
|
||||
def set_exifdata_for_file(self, filename, exifdata):
|
||||
""" sets the exifdata JSON struct for a file """
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
"INSERT OR REPLACE INTO exifdata(filepath_normalized, json_exifdata) VALUES (?, ?);",
|
||||
(filename, exifdata),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
logging.debug(f"set_exifdata: {filename}, {exifdata}")
|
||||
|
||||
def set_data(self, file, uuid, orig_stat, exif_stat, info_json, exif_json):
|
||||
""" sets all the data for file and uuid at once
|
||||
calls set_uuid_for_file
|
||||
set_info_for_uuid
|
||||
set_stat_orig_for_file
|
||||
set_stat_exif_for_file
|
||||
set_exifdata_for_file
|
||||
"""
|
||||
filename = str(pathlib.Path(filename).relative_to(self._path)).lower()
|
||||
|
||||
self.set_uuid_for_file(filename, uuid)
|
||||
self.set_info_for_uuid(uuid, info_json)
|
||||
self.set_stat_orig_for_file(filename, orig_stat)
|
||||
self.set_stat_exif_for_file(filename, exif_stat)
|
||||
self.set_exifdata_for_file(filename, exif_json)
|
||||
|
||||
def close(self):
|
||||
""" close the database connection """
|
||||
try:
|
||||
self._conn.close()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def _open_export_db(self, dbfile):
|
||||
""" open export database and return a db connection
|
||||
if dbfile does not exist, will create and initialize the database
|
||||
returns: connection to the database
|
||||
"""
|
||||
|
||||
if not os.path.isfile(dbfile):
|
||||
logging.debug(f"dbfile {dbfile} doesn't exist, creating it")
|
||||
conn = self._get_db_connection(dbfile)
|
||||
if conn:
|
||||
self._create_db_tables(conn)
|
||||
else:
|
||||
raise Exception("Error getting connection to database {dbfile}")
|
||||
else:
|
||||
logging.debug(f"dbfile {dbfile} exists, opening it")
|
||||
conn = self._get_db_connection(dbfile)
|
||||
|
||||
return conn
|
||||
|
||||
def _get_db_connection(self, dbfile):
|
||||
""" return db connection to dbname """
|
||||
try:
|
||||
conn = sqlite3.connect(dbfile)
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
conn = None
|
||||
|
||||
return conn
|
||||
|
||||
def _create_db_tables(self, conn):
|
||||
""" create (if not already created) the necessary db tables for the export database
|
||||
conn: sqlite3 db connection
|
||||
"""
|
||||
sql_commands = {
|
||||
"sql_version_table": """ CREATE TABLE IF NOT EXISTS version (
|
||||
id INTEGER PRIMARY KEY,
|
||||
osxphotos TEXT,
|
||||
exportdb TEXT
|
||||
); """,
|
||||
"sql_files_table": """ CREATE TABLE IF NOT EXISTS files (
|
||||
id INTEGER PRIMARY KEY,
|
||||
filepath TEXT NOT NULL,
|
||||
filepath_normalized TEXT NOT NULL,
|
||||
uuid TEXT,
|
||||
orig_mode INTEGER,
|
||||
orig_size INTEGER,
|
||||
orig_mtime REAL,
|
||||
exif_mode INTEGER,
|
||||
exif_size INTEGER,
|
||||
exif_mtime REAL
|
||||
); """,
|
||||
"sql_runs_table": """ CREATE TABLE IF NOT EXISTS runs (
|
||||
id INTEGER PRIMARY KEY,
|
||||
datetime TEXT,
|
||||
python_path TEXT,
|
||||
script_name TEXT,
|
||||
args TEXT,
|
||||
cwd TEXT
|
||||
); """,
|
||||
"sql_info_table": """ CREATE TABLE IF NOT EXISTS info (
|
||||
id INTEGER PRIMARY KEY,
|
||||
uuid text NOT NULL,
|
||||
json_info JSON
|
||||
); """,
|
||||
"sql_exifdata_table": """ CREATE TABLE IF NOT EXISTS exifdata (
|
||||
id INTEGER PRIMARY KEY,
|
||||
filepath_normalized TEXT NOT NULL,
|
||||
json_exifdata JSON
|
||||
); """,
|
||||
"sql_files_idx": """ CREATE UNIQUE INDEX idx_files_filepath_normalized on files (filepath_normalized); """,
|
||||
"sql_info_idx": """ CREATE UNIQUE INDEX idx_info_uuid on info (uuid); """,
|
||||
"sql_exifdata_idx": """ CREATE UNIQUE INDEX idx_exifdata_filename on exifdata (filepath_normalized); """,
|
||||
}
|
||||
try:
|
||||
c = conn.cursor()
|
||||
for cmd in sql_commands.values():
|
||||
c.execute(cmd)
|
||||
c.execute(
|
||||
"INSERT INTO version(osxphotos, exportdb) VALUES (?, ?);",
|
||||
(__version__, OSXPHOTOS_EXPORTDB_VERSION),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def __del__(self):
|
||||
""" ensure the database connection is closed """
|
||||
if self._conn:
|
||||
try:
|
||||
self._conn.close()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
|
||||
def _insert_run_info(self):
|
||||
dt = datetime.datetime.utcnow().isoformat()
|
||||
python_path = sys.executable
|
||||
cmd = sys.argv[0]
|
||||
if len(sys.argv) > 1:
|
||||
args = " ".join(sys.argv[1:])
|
||||
else:
|
||||
args = ""
|
||||
cwd = os.getcwd()
|
||||
conn = self._conn
|
||||
try:
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
f"INSERT INTO runs (datetime, python_path, script_name, args, cwd) VALUES (?, ?, ?, ?, ?)",
|
||||
(dt, python_path, cmd, args, cwd),
|
||||
)
|
||||
conn.commit()
|
||||
except Error as e:
|
||||
logging.warning(e)
|
||||
54
osxphotos/_filecmp.py
Normal file
54
osxphotos/_filecmp.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""Utilities for comparing files
|
||||
|
||||
Modified from CPython/Lib/filecmp.py
|
||||
|
||||
Functions:
|
||||
cmp_file(f1, s2) -> int
|
||||
file_sig(f1) -> Tuple[int, int, float]
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import stat
|
||||
|
||||
__all__ = ["cmp", "sig"]
|
||||
|
||||
|
||||
def cmp_file(f1, s2):
|
||||
"""Compare file f1 to signature s2.
|
||||
|
||||
Arguments:
|
||||
|
||||
f1 -- File name
|
||||
|
||||
s2 -- stats as returned by sig
|
||||
|
||||
Return value:
|
||||
|
||||
True if the files are the same, False otherwise.
|
||||
|
||||
This function uses a cache for past comparisons and the results,
|
||||
with cache entries invalidated if their stat information
|
||||
changes. The cache may be cleared by calling clear_cache().
|
||||
|
||||
"""
|
||||
|
||||
if not s2:
|
||||
return False
|
||||
|
||||
s1 = _sig(os.stat(f1))
|
||||
|
||||
if s1[0] != stat.S_IFREG or s2[0] != stat.S_IFREG:
|
||||
return False
|
||||
if s1 == s2:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _sig(st):
|
||||
return (stat.S_IFMT(st.st_mode), st.st_size, st.st_mtime)
|
||||
|
||||
|
||||
def file_sig(f1):
|
||||
""" return os.stat signature for file f1 """
|
||||
return _sig(os.stat(f1))
|
||||
@@ -1,3 +1,3 @@
|
||||
""" version info """
|
||||
|
||||
__version__ = "0.22.10"
|
||||
__version__ = "0.29.0"
|
||||
|
||||
211
osxphotos/albuminfo.py
Normal file
211
osxphotos/albuminfo.py
Normal file
@@ -0,0 +1,211 @@
|
||||
"""
|
||||
AlbumInfo and FolderInfo classes for dealing with albums and folders
|
||||
|
||||
AlbumInfo class
|
||||
Represents a single Album in the Photos library and provides access to the album's attributes
|
||||
PhotosDB.albums() returns a list of AlbumInfo objects
|
||||
|
||||
FolderInfo class
|
||||
Represents a single Folder in the Photos library and provides access to the folders attributes
|
||||
PhotosDB.folders() returns a list of FolderInfo objects
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from ._constants import (
|
||||
_PHOTOS_4_ALBUM_KIND,
|
||||
_PHOTOS_4_TOP_LEVEL_ALBUM,
|
||||
_PHOTOS_4_VERSION,
|
||||
_PHOTOS_5_ALBUM_KIND,
|
||||
_PHOTOS_5_FOLDER_KIND,
|
||||
)
|
||||
|
||||
|
||||
class AlbumInfo:
|
||||
"""
|
||||
Info about a specific Album, contains all the details about the album
|
||||
including folders, photos, etc.
|
||||
"""
|
||||
|
||||
def __init__(self, db=None, uuid=None):
|
||||
self._uuid = uuid
|
||||
self._db = db
|
||||
self._title = self._db._dbalbum_details[uuid]["title"]
|
||||
|
||||
@property
|
||||
def title(self):
|
||||
""" return title / name of album """
|
||||
return self._title
|
||||
|
||||
@property
|
||||
def uuid(self):
|
||||
""" return uuid of album """
|
||||
return self._uuid
|
||||
|
||||
@property
|
||||
def photos(self):
|
||||
""" return list of photos contained in album """
|
||||
try:
|
||||
return self._photos
|
||||
except AttributeError:
|
||||
uuid = self._db._dbalbums_album[self._uuid]
|
||||
self._photos = self._db.photos(uuid=uuid)
|
||||
return self._photos
|
||||
|
||||
@property
|
||||
def folder_names(self):
|
||||
""" return hierarchical list of folders the album is contained in
|
||||
the folder list is in form:
|
||||
["Top level folder", "sub folder 1", "sub folder 2", ...]
|
||||
returns empty list if album is not in any folders """
|
||||
|
||||
try:
|
||||
return self._folder_names
|
||||
except AttributeError:
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
self._folder_names = self._db._album_folder_hierarchy_list(self._uuid)
|
||||
else:
|
||||
self._folder_names = self._db._album_folder_hierarchy_list(self._uuid)
|
||||
return self._folder_names
|
||||
|
||||
@property
|
||||
def folder_list(self):
|
||||
""" return hierarchical list of folders the album is contained in
|
||||
as list of FolderInfo objects in form
|
||||
["Top level folder", "sub folder 1", "sub folder 2", ...]
|
||||
returns empty list if album is not in any folders """
|
||||
|
||||
try:
|
||||
return self._folders
|
||||
except AttributeError:
|
||||
self._folders = self._db._album_folder_hierarchy_folderinfo(self._uuid)
|
||||
return self._folders
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
""" returns FolderInfo object for parent folder or None if no parent (e.g. top-level album) """
|
||||
try:
|
||||
return self._parent
|
||||
except AttributeError:
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
parent_uuid = self._db._dbalbum_details[self._uuid]["folderUuid"]
|
||||
self._parent = (
|
||||
FolderInfo(db=self._db, uuid=parent_uuid)
|
||||
if parent_uuid != _PHOTOS_4_TOP_LEVEL_ALBUM
|
||||
else None
|
||||
)
|
||||
else:
|
||||
parent_pk = self._db._dbalbum_details[self._uuid]["parentfolder"]
|
||||
self._parent = (
|
||||
FolderInfo(db=self._db, uuid=self._db._dbalbums_pk[parent_pk])
|
||||
if parent_pk != self._db._folder_root_pk
|
||||
else None
|
||||
)
|
||||
return self._parent
|
||||
|
||||
def __len__(self):
|
||||
""" return number of photos contained in album """
|
||||
return len(self.photos)
|
||||
|
||||
|
||||
class FolderInfo:
|
||||
"""
|
||||
Info about a specific folder, contains all the details about the folder
|
||||
including folders, albums, etc
|
||||
"""
|
||||
|
||||
def __init__(self, db=None, uuid=None):
|
||||
self._uuid = uuid
|
||||
self._db = db
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
self._pk = None
|
||||
self._title = self._db._dbfolder_details[uuid]["name"]
|
||||
else:
|
||||
self._pk = self._db._dbalbum_details[uuid]["pk"]
|
||||
self._title = self._db._dbalbum_details[uuid]["title"]
|
||||
|
||||
@property
|
||||
def title(self):
|
||||
""" return title / name of folder"""
|
||||
return self._title
|
||||
|
||||
@property
|
||||
def uuid(self):
|
||||
""" return uuid of folder """
|
||||
return self._uuid
|
||||
|
||||
@property
|
||||
def album_info(self):
|
||||
""" return list of albums (as AlbumInfo objects) contained in the folder """
|
||||
try:
|
||||
return self._albums
|
||||
except AttributeError:
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
albums = [
|
||||
AlbumInfo(db=self._db, uuid=album)
|
||||
for album, detail in self._db._dbalbum_details.items()
|
||||
if not detail["intrash"]
|
||||
and detail["albumSubclass"] == _PHOTOS_4_ALBUM_KIND
|
||||
and detail["folderUuid"] == self._uuid
|
||||
]
|
||||
else:
|
||||
albums = [
|
||||
AlbumInfo(db=self._db, uuid=album)
|
||||
for album, detail in self._db._dbalbum_details.items()
|
||||
if not detail["intrash"]
|
||||
and detail["kind"] == _PHOTOS_5_ALBUM_KIND
|
||||
and detail["parentfolder"] == self._pk
|
||||
]
|
||||
self._albums = albums
|
||||
return self._albums
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
""" returns FolderInfo object for parent or None if no parent (e.g. top-level folder) """
|
||||
try:
|
||||
return self._parent
|
||||
except AttributeError:
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
parent_uuid = self._db._dbfolder_details[self._uuid]["parentFolderUuid"]
|
||||
self._parent = (
|
||||
FolderInfo(db=self._db, uuid=parent_uuid)
|
||||
if parent_uuid != _PHOTOS_4_TOP_LEVEL_ALBUM
|
||||
else None
|
||||
)
|
||||
else:
|
||||
parent_pk = self._db._dbalbum_details[self._uuid]["parentfolder"]
|
||||
self._parent = (
|
||||
FolderInfo(db=self._db, uuid=self._db._dbalbums_pk[parent_pk])
|
||||
if parent_pk != self._db._folder_root_pk
|
||||
else None
|
||||
)
|
||||
return self._parent
|
||||
|
||||
@property
|
||||
def subfolders(self):
|
||||
""" return list of folders (as FolderInfo objects) contained in the folder """
|
||||
try:
|
||||
return self._folders
|
||||
except AttributeError:
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
folders = [
|
||||
FolderInfo(db=self._db, uuid=folder)
|
||||
for folder, detail in self._db._dbfolder_details.items()
|
||||
if not detail["intrash"]
|
||||
and not detail["isMagic"]
|
||||
and detail["parentFolderUuid"] == self._uuid
|
||||
]
|
||||
else:
|
||||
folders = [
|
||||
FolderInfo(db=self._db, uuid=album)
|
||||
for album, detail in self._db._dbalbum_details.items()
|
||||
if not detail["intrash"]
|
||||
and detail["kind"] == _PHOTOS_5_FOLDER_KIND
|
||||
and detail["parentfolder"] == self._pk
|
||||
]
|
||||
self._folders = folders
|
||||
return self._folders
|
||||
|
||||
def __len__(self):
|
||||
""" returns count of folders + albums contained in the folder """
|
||||
return len(self.subfolders) + len(self.album_info)
|
||||
52
osxphotos/datetime_formatter.py
Normal file
52
osxphotos/datetime_formatter.py
Normal file
@@ -0,0 +1,52 @@
|
||||
""" Simple formatting of datetime.datetime objects """
|
||||
|
||||
import datetime
|
||||
|
||||
|
||||
class DateTimeFormatter:
|
||||
""" provides property access to formatted datetime.datetime strftime values """
|
||||
|
||||
def __init__(self, dt: datetime.datetime):
|
||||
self.dt = dt
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
""" ISO date in form 2020-03-22 """
|
||||
date = self.dt.date().isoformat()
|
||||
return date
|
||||
|
||||
@property
|
||||
def year(self):
|
||||
""" 4 digit year """
|
||||
year = f"{self.dt.year}"
|
||||
return year
|
||||
|
||||
@property
|
||||
def yy(self):
|
||||
""" 2 digit year """
|
||||
yy = f"{self.dt.strftime('%y')}"
|
||||
return yy
|
||||
|
||||
@property
|
||||
def mm(self):
|
||||
""" 2 digit month """
|
||||
mm = f"{self.dt.strftime('%m')}"
|
||||
return mm
|
||||
|
||||
@property
|
||||
def month(self):
|
||||
""" Month as locale's full name """
|
||||
month = f"{self.dt.strftime('%B')}"
|
||||
return month
|
||||
|
||||
@property
|
||||
def mon(self):
|
||||
""" Month as locale's abbreviated name """
|
||||
mon = f"{self.dt.strftime('%b')}"
|
||||
return mon
|
||||
|
||||
@property
|
||||
def doy(self):
|
||||
""" Julian day of year starting from 001 """
|
||||
doy = f"{self.dt.strftime('%j')}"
|
||||
return doy
|
||||
258
osxphotos/exiftool.py
Normal file
258
osxphotos/exiftool.py
Normal file
@@ -0,0 +1,258 @@
|
||||
""" Yet another simple exiftool wrapper
|
||||
I rolled my own for following reasons:
|
||||
1. I wanted something under MIT license (best alternative was licensed under GPL/BSD)
|
||||
2. I wanted singleton behavior so only a single exiftool process was ever running
|
||||
If these aren't important to you, I highly recommend you use Sven Marnach's excellent
|
||||
pyexiftool: https://github.com/smarnach/pyexiftool which provides more functionality """
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from functools import lru_cache # pylint: disable=syntax-error
|
||||
|
||||
from .utils import _debug
|
||||
|
||||
# exiftool -stay_open commands outputs this EOF marker after command is run
|
||||
EXIFTOOL_STAYOPEN_EOF = "{ready}"
|
||||
EXIFTOOL_STAYOPEN_EOF_LEN = len(EXIFTOOL_STAYOPEN_EOF)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_exiftool_path():
|
||||
""" return path of exiftool, cache result """
|
||||
result = subprocess.run(["which", "exiftool"], stdout=subprocess.PIPE)
|
||||
exiftool_path = result.stdout.decode("utf-8")
|
||||
if _debug():
|
||||
logging.debug("exiftool path = %s" % (exiftool_path))
|
||||
if exiftool_path:
|
||||
return exiftool_path.rstrip()
|
||||
else:
|
||||
raise FileNotFoundError(
|
||||
"Could not find exiftool. Please download and install from "
|
||||
"https://exiftool.org/"
|
||||
)
|
||||
|
||||
|
||||
class _ExifToolProc:
|
||||
""" Runs exiftool in a subprocess via Popen
|
||||
Creates a singleton object """
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
""" create new object or return instance of already created singleton """
|
||||
if not hasattr(cls, "instance") or not cls.instance:
|
||||
cls.instance = super().__new__(cls)
|
||||
|
||||
return cls.instance
|
||||
|
||||
def __init__(self, exiftool=None):
|
||||
""" construct _ExifToolProc singleton object or return instance of already created object
|
||||
exiftool: optional path to exiftool binary (if not provided, will search path to find it) """
|
||||
|
||||
if hasattr(self, "_process_running") and self._process_running:
|
||||
# already running
|
||||
if exiftool is not None:
|
||||
logging.warning(
|
||||
f"exiftool subprocess already running, "
|
||||
f"ignoring exiftool={exiftool}"
|
||||
)
|
||||
return
|
||||
|
||||
if exiftool:
|
||||
self._exiftool = exiftool
|
||||
else:
|
||||
self._exiftool = get_exiftool_path()
|
||||
|
||||
self._process_running = False
|
||||
self._start_proc()
|
||||
|
||||
@property
|
||||
def process(self):
|
||||
""" return the exiftool subprocess """
|
||||
if self._process_running:
|
||||
return self._process
|
||||
else:
|
||||
raise ValueError("exiftool process is not running")
|
||||
|
||||
@property
|
||||
def pid(self):
|
||||
""" return process id (PID) of the exiftool process """
|
||||
return self._process.pid
|
||||
|
||||
@property
|
||||
def exiftool(self):
|
||||
""" return path to exiftool process """
|
||||
return self._exiftool
|
||||
|
||||
def _start_proc(self):
|
||||
""" start exiftool in batch mode """
|
||||
|
||||
if self._process_running:
|
||||
logging.warning("exiftool already running: {self._process}")
|
||||
return
|
||||
|
||||
# open exiftool process
|
||||
self._process = subprocess.Popen(
|
||||
[
|
||||
self._exiftool,
|
||||
"-stay_open", # keep process open in batch mode
|
||||
"True", # -stay_open=True, keep process open in batch mode
|
||||
"-@", # read command-line arguments from file
|
||||
"-", # read from stdin
|
||||
"-common_args", # specifies args common to all commands subsequently run
|
||||
"-n", # no print conversion (e.g. print tag values in machine readable format)
|
||||
"-G", # print group name for each tag
|
||||
],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
self._process_running = True
|
||||
|
||||
def _stop_proc(self):
|
||||
""" stop the exiftool process if it's running, otherwise, do nothing """
|
||||
if not self._process_running:
|
||||
logging.warning("exiftool process is not running")
|
||||
return
|
||||
|
||||
self._process.stdin.write(b"-stay_open\n")
|
||||
self._process.stdin.write(b"False\n")
|
||||
self._process.stdin.flush()
|
||||
try:
|
||||
self._process.communicate(timeout=5)
|
||||
except subprocess.TimeoutExpired:
|
||||
logging.warning(
|
||||
f"exiftool pid {self._process.pid} did not exit, killing it"
|
||||
)
|
||||
self._process.kill()
|
||||
self._process.communicate()
|
||||
|
||||
del self._process
|
||||
self._process_running = False
|
||||
|
||||
def __del__(self):
|
||||
self._stop_proc()
|
||||
|
||||
|
||||
class ExifTool:
|
||||
""" Basic exiftool interface for reading and writing EXIF tags """
|
||||
|
||||
def __init__(self, filepath, exiftool=None, overwrite=True):
|
||||
""" Return ExifTool object
|
||||
file: path to image file
|
||||
exiftool: path to exiftool, if not specified will look in path
|
||||
overwrite: if True, will overwrite image file without creating backup, default=False """
|
||||
self.file = filepath
|
||||
self.overwrite = overwrite
|
||||
self.data = {}
|
||||
self._exiftoolproc = _ExifToolProc(exiftool=exiftool)
|
||||
self._process = self._exiftoolproc.process
|
||||
self._read_exif()
|
||||
|
||||
def setvalue(self, tag, value):
|
||||
""" Set tag to value(s)
|
||||
if value is None, will delete tag """
|
||||
|
||||
if value is None:
|
||||
value = ""
|
||||
command = []
|
||||
command.append(f"-{tag}={value}")
|
||||
if self.overwrite:
|
||||
command.append("-overwrite_original")
|
||||
self.run_commands(*command)
|
||||
|
||||
def addvalues(self, tag, *values):
|
||||
""" Add one or more value(s) to tag
|
||||
If more than one value is passed, each value will be added to the tag
|
||||
Notes: exiftool may add duplicate values for some tags so the caller must ensure
|
||||
the values being added are not already in the EXIF data
|
||||
For some tags, such as IPTC:Keywords, this will add a new value to the list of keywords,
|
||||
but for others, such as EXIF:ISO, this will literally add a value to the existing value.
|
||||
It's up to the caller to know what exiftool will do for each tag
|
||||
If setvalue called before addvalues, exiftool does not appear to add duplicates,
|
||||
but if addvalues called without first calling setvalue, exiftool will add duplicate values
|
||||
"""
|
||||
if not values:
|
||||
raise ValueError("Must pass at least one value")
|
||||
|
||||
command = []
|
||||
for value in values:
|
||||
if value is None:
|
||||
raise ValueError("Can't add None value to tag")
|
||||
command.append(f"-{tag}+={value}")
|
||||
|
||||
if self.overwrite:
|
||||
command.append("-overwrite_original")
|
||||
|
||||
if command:
|
||||
self.run_commands(*command)
|
||||
|
||||
def run_commands(self, *commands, no_file=False):
|
||||
""" run commands in the exiftool process and return result
|
||||
no_file: (bool) do not pass the filename to exiftool (default=False)
|
||||
by default, all commands will be run against self.file
|
||||
use no_file=True to run a command without passing the filename """
|
||||
if not hasattr(self, "_process") or not self._process:
|
||||
raise ValueError("exiftool process is not running")
|
||||
|
||||
if not commands:
|
||||
raise TypeError("must provide one or more command to run")
|
||||
|
||||
filename = os.fsencode(self.file) if not no_file else b""
|
||||
command_str = (
|
||||
b"\n".join([c.encode("utf-8") for c in commands])
|
||||
+ b"\n"
|
||||
+ filename
|
||||
+ b"\n"
|
||||
+ b"-execute\n"
|
||||
)
|
||||
|
||||
if _debug():
|
||||
logging.debug(command_str)
|
||||
|
||||
# send the command
|
||||
self._process.stdin.write(command_str)
|
||||
self._process.stdin.flush()
|
||||
|
||||
# read the output
|
||||
output = b""
|
||||
while EXIFTOOL_STAYOPEN_EOF not in str(output):
|
||||
output += self._process.stdout.readline().strip()
|
||||
return output[:-EXIFTOOL_STAYOPEN_EOF_LEN]
|
||||
|
||||
@property
|
||||
def pid(self):
|
||||
""" return process id (PID) of the exiftool process """
|
||||
return self._process.pid
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
""" returns exiftool version """
|
||||
ver = self.run_commands("-ver", no_file=True)
|
||||
return ver.decode("utf-8")
|
||||
|
||||
def as_dict(self):
|
||||
""" return dictionary of all EXIF tags and values from exiftool
|
||||
returns empty dict if no tags
|
||||
"""
|
||||
json_str = self.run_commands("-json")
|
||||
if json_str:
|
||||
exifdict = json.loads(json_str)
|
||||
return exifdict[0]
|
||||
else:
|
||||
return dict()
|
||||
|
||||
def json(self):
|
||||
""" returns JSON string containing all EXIF tags and values from exiftool """
|
||||
json_str = self.run_commands("-json")
|
||||
return json_str
|
||||
|
||||
def _read_exif(self):
|
||||
""" read exif data from file """
|
||||
data = self.as_dict()
|
||||
self.data = {k: v for k, v in data.items()}
|
||||
|
||||
def __str__(self):
|
||||
str_ = f"file: {self.file}\nexiftool: {self._exiftoolproc._exiftool}"
|
||||
return str_
|
||||
@@ -1,866 +0,0 @@
|
||||
"""
|
||||
PhotoInfo class
|
||||
Represents a single photo in the Photos library and provides access to the photo's attributes
|
||||
PhotosDB.photos() returns a list of PhotoInfo objects
|
||||
"""
|
||||
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os.path
|
||||
import pathlib
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import timedelta, timezone
|
||||
from pprint import pformat
|
||||
|
||||
import yaml
|
||||
from mako.template import Template
|
||||
|
||||
from ._constants import (
|
||||
_MOVIE_TYPE,
|
||||
_PHOTO_TYPE,
|
||||
_PHOTOS_5_SHARED_PHOTO_PATH,
|
||||
_PHOTOS_5_VERSION,
|
||||
_TEMPLATE_DIR,
|
||||
_XMP_TEMPLATE_NAME,
|
||||
)
|
||||
from .utils import (
|
||||
_copy_file,
|
||||
_export_photo_uuid_applescript,
|
||||
_get_resource_loc,
|
||||
dd_to_dms_str,
|
||||
)
|
||||
|
||||
|
||||
class PhotoInfo:
|
||||
"""
|
||||
Info about a specific photo, contains all the details about the photo
|
||||
including keywords, persons, albums, uuid, path, etc.
|
||||
"""
|
||||
|
||||
def __init__(self, db=None, uuid=None, info=None):
|
||||
self._uuid = uuid
|
||||
self._info = info
|
||||
self._db = db
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
""" filename of the picture """
|
||||
return self._info["filename"]
|
||||
|
||||
@property
|
||||
def original_filename(self):
|
||||
""" original filename of the picture
|
||||
Photos 5 mangles filenames upon import """
|
||||
return self._info["originalFilename"]
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
""" image creation date as timezone aware datetime object """
|
||||
imagedate = self._info["imageDate"]
|
||||
seconds = self._info["imageTimeZoneOffsetSeconds"] or 0
|
||||
delta = timedelta(seconds=seconds)
|
||||
tz = timezone(delta)
|
||||
imagedate_utc = imagedate.astimezone(tz=tz)
|
||||
return imagedate_utc
|
||||
|
||||
@property
|
||||
def date_modified(self):
|
||||
""" image modification date as timezone aware datetime object
|
||||
or None if no modification date set """
|
||||
imagedate = self._info["lastmodifieddate"]
|
||||
if imagedate:
|
||||
seconds = self._info["imageTimeZoneOffsetSeconds"] or 0
|
||||
delta = timedelta(seconds=seconds)
|
||||
tz = timezone(delta)
|
||||
imagedate_utc = imagedate.astimezone(tz=tz)
|
||||
return imagedate_utc
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def tzoffset(self):
|
||||
""" timezone offset from UTC in seconds """
|
||||
return self._info["imageTimeZoneOffsetSeconds"]
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
""" absolute path on disk of the original picture """
|
||||
|
||||
photopath = None
|
||||
if self._info["isMissing"] == 1:
|
||||
return photopath # path would be meaningless until downloaded
|
||||
|
||||
if self._db._db_version < _PHOTOS_5_VERSION:
|
||||
vol = self._info["volume"]
|
||||
if vol is not None:
|
||||
photopath = os.path.join("/Volumes", vol, self._info["imagePath"])
|
||||
else:
|
||||
photopath = os.path.join(
|
||||
self._db._masters_path, self._info["imagePath"]
|
||||
)
|
||||
return photopath
|
||||
# TODO: Is there a way to use applescript or PhotoKit to force the download in this
|
||||
|
||||
if self._info["shared"]:
|
||||
# shared photo
|
||||
photopath = os.path.join(
|
||||
self._db._library_path,
|
||||
_PHOTOS_5_SHARED_PHOTO_PATH,
|
||||
self._info["directory"],
|
||||
self._info["filename"],
|
||||
)
|
||||
return photopath
|
||||
|
||||
# if self._info["masterFingerprint"]:
|
||||
# if masterFingerprint is not null, path appears to be valid
|
||||
if self._info["directory"].startswith("/"):
|
||||
photopath = os.path.join(self._info["directory"], self._info["filename"])
|
||||
else:
|
||||
photopath = os.path.join(
|
||||
self._db._masters_path, self._info["directory"], self._info["filename"]
|
||||
)
|
||||
return photopath
|
||||
|
||||
# if all else fails, photopath = None
|
||||
# photopath = None
|
||||
# logging.debug(
|
||||
# f"WARNING: photopath None, masterFingerprint null, not shared {pformat(self._info)}"
|
||||
# )
|
||||
# return photopath
|
||||
|
||||
@property
|
||||
def path_edited(self):
|
||||
""" absolute path on disk of the edited picture """
|
||||
""" None if photo has not been edited """
|
||||
|
||||
# TODO: break this code into a _path_edited_4 and _path_edited_5
|
||||
# version to simplify the big if/then; same for path_live_photo
|
||||
|
||||
photopath = None
|
||||
|
||||
if self._db._db_version < _PHOTOS_5_VERSION:
|
||||
if self._info["hasAdjustments"]:
|
||||
edit_id = self._info["edit_resource_id"]
|
||||
if edit_id is not None:
|
||||
library = self._db._library_path
|
||||
folder_id, file_id = _get_resource_loc(edit_id)
|
||||
# todo: is this always true or do we need to search file file_id under folder_id
|
||||
# figure out what kind it is and build filename
|
||||
filename = None
|
||||
if self._info["type"] == _PHOTO_TYPE:
|
||||
# it's a photo
|
||||
filename = f"fullsizeoutput_{file_id}.jpeg"
|
||||
elif self._info["type"] == _MOVIE_TYPE:
|
||||
# it's a movie
|
||||
filename = f"fullsizeoutput_{file_id}.mov"
|
||||
else:
|
||||
# don't know what it is!
|
||||
logging.debug(f"WARNING: unknown type {self._info['type']}")
|
||||
return None
|
||||
|
||||
# photopath appears to usually be in "00" subfolder but
|
||||
# could be elsewhere--I haven't figured out this logic yet
|
||||
# first see if it's in 00
|
||||
photopath = os.path.join(
|
||||
library,
|
||||
"resources",
|
||||
"media",
|
||||
"version",
|
||||
folder_id,
|
||||
"00",
|
||||
filename,
|
||||
)
|
||||
|
||||
if not os.path.isfile(photopath):
|
||||
rootdir = os.path.join(
|
||||
library, "resources", "media", "version", folder_id
|
||||
)
|
||||
|
||||
for dirname, _, filelist in os.walk(rootdir):
|
||||
if filename in filelist:
|
||||
photopath = os.path.join(dirname, filename)
|
||||
break
|
||||
|
||||
# check again to see if we found a valid file
|
||||
if not os.path.isfile(photopath):
|
||||
logging.debug(
|
||||
f"MISSING PATH: edited file for UUID {self._uuid} should be at {photopath} but does not appear to exist"
|
||||
)
|
||||
photopath = None
|
||||
else:
|
||||
logging.debug(
|
||||
f"{self.uuid} hasAdjustments but edit_resource_id is None"
|
||||
)
|
||||
photopath = None
|
||||
else:
|
||||
photopath = None
|
||||
|
||||
# if self._info["isMissing"] == 1:
|
||||
# photopath = None # path would be meaningless until downloaded
|
||||
else:
|
||||
# in Photos 5.0 / Catalina / MacOS 10.15:
|
||||
# edited photos appear to always be converted to .jpeg and stored in
|
||||
# library_name/resources/renders/X/UUID_1_201_a.jpeg
|
||||
# where X = first letter of UUID
|
||||
# and UUID = UUID of image
|
||||
# this seems to be true even for photos not copied to Photos library and
|
||||
# where original format was not jpg/jpeg
|
||||
# if more than one edit, previous edit is stored as UUID_p.jpeg
|
||||
|
||||
if self._info["hasAdjustments"]:
|
||||
library = self._db._library_path
|
||||
directory = self._uuid[0] # first char of uuid
|
||||
filename = None
|
||||
if self._info["type"] == _PHOTO_TYPE:
|
||||
# it's a photo
|
||||
filename = f"{self._uuid}_1_201_a.jpeg"
|
||||
elif self._info["type"] == _MOVIE_TYPE:
|
||||
# it's a movie
|
||||
filename = f"{self._uuid}_2_0_a.mov"
|
||||
else:
|
||||
# don't know what it is!
|
||||
logging.debug(f"WARNING: unknown type {self._info['type']}")
|
||||
return None
|
||||
|
||||
photopath = os.path.join(
|
||||
library, "resources", "renders", directory, filename
|
||||
)
|
||||
|
||||
if not os.path.isfile(photopath):
|
||||
logging.debug(
|
||||
f"edited file for UUID {self._uuid} should be at {photopath} but does not appear to exist"
|
||||
)
|
||||
photopath = None
|
||||
else:
|
||||
photopath = None
|
||||
|
||||
# TODO: might be possible for original/master to be missing but edit to still be there
|
||||
# if self._info["isMissing"] == 1:
|
||||
# photopath = None # path would be meaningless until downloaded
|
||||
|
||||
logging.debug(photopath)
|
||||
|
||||
return photopath
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
""" long / extended description of picture """
|
||||
return self._info["extendedDescription"]
|
||||
|
||||
@property
|
||||
def persons(self):
|
||||
""" list of persons in picture """
|
||||
return self._info["persons"]
|
||||
|
||||
@property
|
||||
def albums(self):
|
||||
""" list of albums picture is contained in """
|
||||
albums = []
|
||||
for album in self._info["albums"]:
|
||||
albums.append(self._db._dbalbum_details[album]["title"])
|
||||
return albums
|
||||
|
||||
@property
|
||||
def keywords(self):
|
||||
""" list of keywords for picture """
|
||||
return self._info["keywords"]
|
||||
|
||||
@property
|
||||
def title(self):
|
||||
""" name / title of picture """
|
||||
return self._info["name"]
|
||||
|
||||
@property
|
||||
def uuid(self):
|
||||
""" UUID of picture """
|
||||
return self._uuid
|
||||
|
||||
@property
|
||||
def ismissing(self):
|
||||
""" returns true if photo is missing from disk (which means it's not been downloaded from iCloud)
|
||||
NOTE: the photos.db database uses an asynchrounous write-ahead log so changes in Photos
|
||||
do not immediately get written to disk. In particular, I've noticed that downloading
|
||||
an image from the cloud does not force the database to be updated until something else
|
||||
e.g. an edit, keyword, etc. occurs forcing a database synch
|
||||
The exact process / timing is a mystery to be but be aware that if some photos were recently
|
||||
downloaded from cloud to local storate their status in the database might still show
|
||||
isMissing = 1
|
||||
"""
|
||||
return True if self._info["isMissing"] == 1 else False
|
||||
|
||||
@property
|
||||
def hasadjustments(self):
|
||||
""" True if picture has adjustments / edits """
|
||||
return True if self._info["hasAdjustments"] == 1 else False
|
||||
|
||||
@property
|
||||
def external_edit(self):
|
||||
""" Returns True if picture was edited outside of Photos using external editor """
|
||||
return (
|
||||
True
|
||||
if self._info["adjustmentFormatID"] == "com.apple.Photos.externalEdit"
|
||||
else False
|
||||
)
|
||||
|
||||
@property
|
||||
def favorite(self):
|
||||
""" True if picture is marked as favorite """
|
||||
return True if self._info["favorite"] == 1 else False
|
||||
|
||||
@property
|
||||
def hidden(self):
|
||||
""" True if picture is hidden """
|
||||
return True if self._info["hidden"] == 1 else False
|
||||
|
||||
@property
|
||||
def location(self):
|
||||
""" returns (latitude, longitude) as float in degrees or None """
|
||||
return (self._latitude, self._longitude)
|
||||
|
||||
@property
|
||||
def shared(self):
|
||||
""" returns True if photos is in a shared iCloud album otherwise false
|
||||
Only valid on Photos 5; returns None on older versions """
|
||||
if self._db._db_version >= _PHOTOS_5_VERSION:
|
||||
return self._info["shared"]
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def uti(self):
|
||||
""" Returns Uniform Type Identifier (UTI) for the image
|
||||
for example: public.jpeg or com.apple.quicktime-movie
|
||||
"""
|
||||
return self._info["UTI"]
|
||||
|
||||
@property
|
||||
def ismovie(self):
|
||||
""" Returns True if file is a movie, otherwise False
|
||||
"""
|
||||
return True if self._info["type"] == _MOVIE_TYPE else False
|
||||
|
||||
@property
|
||||
def isphoto(self):
|
||||
""" Returns True if file is an image, otherwise False
|
||||
"""
|
||||
return True if self._info["type"] == _PHOTO_TYPE else False
|
||||
|
||||
@property
|
||||
def incloud(self):
|
||||
""" Returns True if photo is cloud asset and is synched to cloud
|
||||
False if photo is cloud asset and not yet synched to cloud
|
||||
None if photo is not cloud asset
|
||||
"""
|
||||
return self._info["incloud"]
|
||||
|
||||
@property
|
||||
def iscloudasset(self):
|
||||
""" Returns True if photo is a cloud asset (in an iCloud library),
|
||||
otherwise False
|
||||
"""
|
||||
if self._db._db_version < _PHOTOS_5_VERSION:
|
||||
return (
|
||||
True
|
||||
if self._info["cloudLibraryState"] is not None
|
||||
and self._info["cloudLibraryState"] != 0
|
||||
else False
|
||||
)
|
||||
else:
|
||||
return True if self._info["cloudAssetGUID"] is not None else False
|
||||
|
||||
@property
|
||||
def burst(self):
|
||||
""" Returns True if photo is part of a Burst photo set, otherwise False """
|
||||
return self._info["burst"]
|
||||
|
||||
@property
|
||||
def burst_photos(self):
|
||||
""" If photo is a burst photo, returns list of PhotoInfo objects
|
||||
that are part of the same burst photo set; otherwise returns empty list.
|
||||
self is not included in the returned list """
|
||||
if self._info["burst"]:
|
||||
burst_uuid = self._info["burstUUID"]
|
||||
burst_photos = [
|
||||
PhotoInfo(db=self._db, uuid=u, info=self._db._dbphotos[u])
|
||||
for u in self._db._dbphotos_burst[burst_uuid]
|
||||
if u != self._uuid
|
||||
]
|
||||
return burst_photos
|
||||
else:
|
||||
return []
|
||||
|
||||
@property
|
||||
def live_photo(self):
|
||||
""" Returns True if photo is a live photo, otherwise False """
|
||||
return self._info["live_photo"]
|
||||
|
||||
@property
|
||||
def path_live_photo(self):
|
||||
""" Returns path to the associated video file for a live photo
|
||||
If photo is not a live photo, returns None
|
||||
If photo is missing, returns None """
|
||||
|
||||
photopath = None
|
||||
if self._db._db_version < _PHOTOS_5_VERSION:
|
||||
if self.live_photo and not self.ismissing:
|
||||
live_model_id = self._info["live_model_id"]
|
||||
if live_model_id == None:
|
||||
logging.debug(f"missing live_model_id: {self._uuid}")
|
||||
photopath = None
|
||||
else:
|
||||
folder_id, file_id = _get_resource_loc(live_model_id)
|
||||
library_path = self._db.library_path
|
||||
photopath = os.path.join(
|
||||
library_path,
|
||||
"resources",
|
||||
"media",
|
||||
"master",
|
||||
folder_id,
|
||||
"00",
|
||||
f"jpegvideocomplement_{file_id}.mov",
|
||||
)
|
||||
if not os.path.isfile(photopath):
|
||||
# In testing, I've seen occasional missing movie for live photo
|
||||
# These appear to be valid -- e.g. live component hasn't been downloaded from iCloud
|
||||
# photos 4 has "isOnDisk" column we could check
|
||||
# or could do the actual check with "isfile"
|
||||
# TODO: should this be a warning or debug?
|
||||
logging.debug(
|
||||
f"MISSING PATH: live photo path for UUID {self._uuid} should be at {photopath} but does not appear to exist"
|
||||
)
|
||||
photopath = None
|
||||
else:
|
||||
photopath = None
|
||||
else:
|
||||
# Photos 5
|
||||
if self.live_photo and not self.ismissing:
|
||||
filename = pathlib.Path(self.path)
|
||||
photopath = filename.parent.joinpath(f"{filename.stem}_3.mov")
|
||||
photopath = str(photopath)
|
||||
if not os.path.isfile(photopath):
|
||||
# In testing, I've seen occasional missing movie for live photo
|
||||
# these appear to be valid -- e.g. video component not yet downloaded from iCloud
|
||||
# TODO: should this be a warning or debug?
|
||||
logging.debug(
|
||||
f"MISSING PATH: live photo path for UUID {self._uuid} should be at {photopath} but does not appear to exist"
|
||||
)
|
||||
photopath = None
|
||||
else:
|
||||
photopath = None
|
||||
|
||||
return photopath
|
||||
|
||||
def export(
|
||||
self,
|
||||
dest,
|
||||
*filename,
|
||||
edited=False,
|
||||
live_photo=False,
|
||||
overwrite=False,
|
||||
increment=True,
|
||||
sidecar_json=False,
|
||||
sidecar_xmp=False,
|
||||
use_photos_export=False,
|
||||
timeout=120,
|
||||
):
|
||||
""" export photo
|
||||
dest: must be valid destination path (or exception raised)
|
||||
filename: (optional): name of picture; if not provided, will use current filename
|
||||
edited: (boolean, default=False); if True will export the edited version of the photo
|
||||
(or raise exception if no edited version)
|
||||
live_photo: (boolean, default=False); if True, will also export the associted .mov for live photos
|
||||
overwrite: (boolean, default=False); if True will overwrite files if they alreay exist
|
||||
increment: (boolean, default=True); if True, will increment file name until a non-existant name is found
|
||||
if overwrite=False and increment=False, export will fail if destination file already exists
|
||||
sidecar_json: (boolean, default = False); if True will also write a json sidecar with IPTC data in format readable by exiftool
|
||||
sidecar filename will be dest/filename.json
|
||||
sidecar_xmp: (boolean, default = False); if True will also write a XMP sidecar with IPTC data
|
||||
sidecar filename will be dest/filename.xmp
|
||||
use_photos_export: (boolean, default=False); if True will attempt to export photo via applescript interaction with Photos
|
||||
timeout: (int, default=120) timeout in seconds used with use_photos_export
|
||||
returns the full path to the exported file """
|
||||
|
||||
# TODO: add this docs:
|
||||
# ( for jpeg in *.jpeg; do exiftool -v -json=$jpeg.json $jpeg; done )
|
||||
|
||||
# check arguments and get destination path and filename (if provided)
|
||||
if filename and len(filename) > 2:
|
||||
raise TypeError(
|
||||
"Too many positional arguments. Should be at most two: destination, filename."
|
||||
)
|
||||
else:
|
||||
# verify destination is a valid path
|
||||
if dest is None:
|
||||
raise ValueError("Destination must not be None")
|
||||
elif not os.path.isdir(dest):
|
||||
raise FileNotFoundError("Invalid path passed to export")
|
||||
|
||||
if filename and len(filename) == 1:
|
||||
# second arg is filename of picture
|
||||
filename = filename[0]
|
||||
else:
|
||||
# no filename provided so use the default
|
||||
# if edited file requested, use filename but add _edited
|
||||
# need to use file extension from edited file as Photos saves a jpeg once edited
|
||||
if edited and not use_photos_export:
|
||||
# verify we have a valid path_edited and use that to get filename
|
||||
if not self.path_edited:
|
||||
raise FileNotFoundError(
|
||||
"edited=True but path_edited is none; hasadjustments: "
|
||||
f" {self.hasadjustments}"
|
||||
)
|
||||
edited_name = pathlib.Path(self.path_edited).name
|
||||
edited_suffix = pathlib.Path(edited_name).suffix
|
||||
filename = (
|
||||
pathlib.Path(self.filename).stem + "_edited" + edited_suffix
|
||||
)
|
||||
else:
|
||||
filename = self.filename
|
||||
|
||||
# check destination path
|
||||
dest = pathlib.Path(dest)
|
||||
filename = pathlib.Path(filename)
|
||||
dest = dest / filename
|
||||
|
||||
# check to see if file exists and if so, add (1), (2), etc until we find one that works
|
||||
# Photos checks the stem and adds (1), (2), etc which avoids collision with sidecars
|
||||
# e.g. exporting sidecar for file1.png and file1.jpeg
|
||||
# if file1.png exists and exporting file1.jpeg,
|
||||
# dest will be file1 (1).jpeg even though file1.jpeg doesn't exist to prevent sidecar collision
|
||||
if increment and not overwrite:
|
||||
count = 1
|
||||
glob_str = str(dest.parent / f"{dest.stem}*")
|
||||
dest_files = glob.glob(glob_str)
|
||||
dest_files = [pathlib.Path(f).stem for f in dest_files]
|
||||
dest_new = dest.stem
|
||||
while dest_new in dest_files:
|
||||
dest_new = f"{dest.stem} ({count})"
|
||||
count += 1
|
||||
dest = dest.parent / f"{dest_new}{dest.suffix}"
|
||||
|
||||
# if overwrite==False and #increment==False, export should fail if file exists
|
||||
if dest.exists() and not overwrite and not increment:
|
||||
raise FileExistsError(
|
||||
f"destination exists ({dest}); overwrite={overwrite}, increment={increment}"
|
||||
)
|
||||
|
||||
if not use_photos_export:
|
||||
# find the source file on disk and export
|
||||
# get path to source file and verify it's not None and is valid file
|
||||
# TODO: how to handle ismissing or not hasadjustments and edited=True cases?
|
||||
if edited:
|
||||
if not self.hasadjustments:
|
||||
logging.warning(
|
||||
"Attempting to export edited photo but hasadjustments=False"
|
||||
)
|
||||
|
||||
if self.path_edited is not None:
|
||||
src = self.path_edited
|
||||
else:
|
||||
raise FileNotFoundError(
|
||||
f"edited=True but path_edited is none; hasadjustments: {self.hasadjustments}"
|
||||
)
|
||||
else:
|
||||
if self.ismissing:
|
||||
logging.warning(
|
||||
f"Attempting to export photo with ismissing=True: path = {self.path}"
|
||||
)
|
||||
|
||||
if self.path is None:
|
||||
logging.warning(
|
||||
f"Attempting to export photo but path is None: ismissing = {self.ismissing}"
|
||||
)
|
||||
raise FileNotFoundError("Cannot export photo if path is None")
|
||||
else:
|
||||
src = self.path
|
||||
|
||||
if not os.path.isfile(src):
|
||||
raise FileNotFoundError(f"{src} does not appear to exist")
|
||||
|
||||
logging.debug(
|
||||
f"exporting {src} to {dest}, overwrite={overwrite}, increment={increment}, dest exists: {dest.exists()}"
|
||||
)
|
||||
|
||||
# copy the file, _copy_file uses ditto to preserve Mac extended attributes
|
||||
_copy_file(src, dest)
|
||||
|
||||
# copy live photo associated .mov if requested
|
||||
if live_photo and self.live_photo:
|
||||
live_name = dest.parent / f"{dest.stem}.mov"
|
||||
src_live = self.path_live_photo
|
||||
|
||||
if src_live is not None:
|
||||
logging.debug(
|
||||
f"Exporting live photo video of {filename} as {live_name.name}"
|
||||
)
|
||||
_copy_file(src_live, str(live_name))
|
||||
else:
|
||||
logging.warning(f"Skipping missing live movie for {filename}")
|
||||
else:
|
||||
# use_photo_export
|
||||
exported = None
|
||||
# export live_photo .mov file?
|
||||
live_photo = True if live_photo and self.live_photo else False
|
||||
if edited:
|
||||
# exported edited version and not original
|
||||
if filename:
|
||||
# use filename stem provided
|
||||
filestem = dest.stem
|
||||
else:
|
||||
# didn't get passed a filename, add _edited
|
||||
filestem = f"{dest.stem}_edited"
|
||||
exported = _export_photo_uuid_applescript(
|
||||
self.uuid,
|
||||
dest.parent,
|
||||
filestem=filestem,
|
||||
original=False,
|
||||
edited=True,
|
||||
live_photo=live_photo,
|
||||
timeout=timeout,
|
||||
)
|
||||
else:
|
||||
# export original version and not edited
|
||||
filestem = dest.stem
|
||||
exported = _export_photo_uuid_applescript(
|
||||
self.uuid,
|
||||
dest.parent,
|
||||
filestem=filestem,
|
||||
original=True,
|
||||
edited=False,
|
||||
live_photo=live_photo,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
if exported is None:
|
||||
logging.warning(f"Error exporting photo {self.uuid} to {dest}")
|
||||
|
||||
if sidecar_json:
|
||||
logging.debug("writing exiftool_json_sidecar")
|
||||
sidecar_filename = dest.parent / pathlib.Path(f"{dest.stem}.json")
|
||||
sidecar_str = self._exiftool_json_sidecar()
|
||||
try:
|
||||
self._write_sidecar(sidecar_filename, sidecar_str)
|
||||
except Exception as e:
|
||||
logging.warning(f"Error writing json sidecar to {sidecar_filename}")
|
||||
raise e
|
||||
|
||||
if sidecar_xmp:
|
||||
logging.debug("writing xmp_sidecar")
|
||||
sidecar_filename = dest.parent / pathlib.Path(f"{dest.stem}.xmp")
|
||||
sidecar_str = self._xmp_sidecar()
|
||||
try:
|
||||
self._write_sidecar(sidecar_filename, sidecar_str)
|
||||
except Exception as e:
|
||||
logging.warning(f"Error writing xmp sidecar to {sidecar_filename}")
|
||||
raise e
|
||||
|
||||
return str(dest)
|
||||
|
||||
def _exiftool_json_sidecar(self):
|
||||
""" return json string of EXIF details in exiftool sidecar format
|
||||
Does not include all the EXIF fields as those are likely already in the image
|
||||
Exports the following:
|
||||
FileName
|
||||
ImageDescription
|
||||
Description
|
||||
Title
|
||||
TagsList
|
||||
Keywords
|
||||
Subject
|
||||
PersonInImage
|
||||
GPSLatitude, GPSLongitude
|
||||
GPSPosition
|
||||
GPSLatitudeRef, GPSLongitudeRef
|
||||
DateTimeOriginal
|
||||
OffsetTimeOriginal
|
||||
ModifyDate """
|
||||
|
||||
exif = {}
|
||||
exif["_CreatedBy"] = "osxphotos, https://github.com/RhetTbull/osxphotos"
|
||||
exif["FileName"] = self.filename
|
||||
|
||||
if self.description:
|
||||
exif["ImageDescription"] = self.description
|
||||
exif["Description"] = self.description
|
||||
|
||||
if self.title:
|
||||
exif["Title"] = self.title
|
||||
|
||||
if self.keywords:
|
||||
exif["TagsList"] = exif["Keywords"] = list(self.keywords)
|
||||
# Photos puts both keywords and persons in Subject when using "Export IPTC as XMP"
|
||||
exif["Subject"] = list(self.keywords)
|
||||
|
||||
if self.persons:
|
||||
exif["PersonInImage"] = self.persons
|
||||
# Photos puts both keywords and persons in Subject when using "Export IPTC as XMP"
|
||||
if "Subject" in exif:
|
||||
exif["Subject"].extend(self.persons)
|
||||
else:
|
||||
exif["Subject"] = self.persons
|
||||
|
||||
# if self.favorite():
|
||||
# exif["Rating"] = 5
|
||||
|
||||
(lat, lon) = self.location
|
||||
if lat is not None and lon is not None:
|
||||
lat_str, lon_str = dd_to_dms_str(lat, lon)
|
||||
exif["GPSLatitude"] = lat_str
|
||||
exif["GPSLongitude"] = lon_str
|
||||
exif["GPSPosition"] = f"{lat_str}, {lon_str}"
|
||||
lat_ref = "North" if lat >= 0 else "South"
|
||||
lon_ref = "East" if lon >= 0 else "West"
|
||||
exif["GPSLatitudeRef"] = lat_ref
|
||||
exif["GPSLongitudeRef"] = lon_ref
|
||||
|
||||
# process date/time and timezone offset
|
||||
date = self.date
|
||||
# exiftool expects format to "2015:01:18 12:00:00"
|
||||
datetimeoriginal = date.strftime("%Y:%m:%d %H:%M:%S")
|
||||
offsettime = date.strftime("%z")
|
||||
# find timezone offset in format "-04:00"
|
||||
offset = re.findall(r"([+-]?)([\d]{2})([\d]{2})", offsettime)
|
||||
offset = offset[0] # findall returns list of tuples
|
||||
offsettime = f"{offset[0]}{offset[1]}:{offset[2]}"
|
||||
exif["DateTimeOriginal"] = datetimeoriginal
|
||||
exif["OffsetTimeOriginal"] = offsettime
|
||||
|
||||
if self.date_modified is not None:
|
||||
exif["ModifyDate"] = self.date_modified.strftime("%Y:%m:%d %H:%M:%S")
|
||||
|
||||
json_str = json.dumps([exif])
|
||||
return json_str
|
||||
|
||||
def _xmp_sidecar(self):
|
||||
""" returns string for XMP sidecar """
|
||||
# TODO: add additional fields to XMP file?
|
||||
|
||||
xmp_template = Template(
|
||||
filename=os.path.join(_TEMPLATE_DIR, _XMP_TEMPLATE_NAME)
|
||||
)
|
||||
xmp_str = xmp_template.render(photo=self)
|
||||
# remove extra lines that mako inserts from template
|
||||
xmp_str = "\n".join(
|
||||
[line for line in xmp_str.split("\n") if line.strip() != ""]
|
||||
)
|
||||
return xmp_str
|
||||
|
||||
def _write_sidecar(self, filename, sidecar_str):
|
||||
""" write sidecar_str to filename
|
||||
used for exporting sidecar info """
|
||||
if not filename and not sidecar_str:
|
||||
raise (
|
||||
ValueError(
|
||||
f"filename {filename} and sidecar_str {sidecar_str} must not be None"
|
||||
)
|
||||
)
|
||||
|
||||
# TODO: catch exception?
|
||||
f = open(filename, "w")
|
||||
f.write(sidecar_str)
|
||||
f.close()
|
||||
|
||||
@property
|
||||
def _longitude(self):
|
||||
""" Returns longitude, in degrees """
|
||||
return self._info["longitude"]
|
||||
|
||||
@property
|
||||
def _latitude(self):
|
||||
""" Returns latitude, in degrees """
|
||||
return self._info["latitude"]
|
||||
|
||||
def __repr__(self):
|
||||
return f"osxphotos.{self.__class__.__name__}(db={self._db}, uuid='{self._uuid}', info={self._info})"
|
||||
|
||||
def __str__(self):
|
||||
""" string representation of PhotoInfo object """
|
||||
|
||||
date_iso = self.date.isoformat()
|
||||
date_modified_iso = (
|
||||
self.date_modified.isoformat() if self.date_modified else None
|
||||
)
|
||||
|
||||
info = {
|
||||
"uuid": self.uuid,
|
||||
"filename": self.filename,
|
||||
"original_filename": self.original_filename,
|
||||
"date": date_iso,
|
||||
"description": self.description,
|
||||
"title": self.title,
|
||||
"keywords": self.keywords,
|
||||
"albums": self.albums,
|
||||
"persons": self.persons,
|
||||
"path": self.path,
|
||||
"ismissing": self.ismissing,
|
||||
"hasadjustments": self.hasadjustments,
|
||||
"external_edit": self.external_edit,
|
||||
"favorite": self.favorite,
|
||||
"hidden": self.hidden,
|
||||
"latitude": self._latitude,
|
||||
"longitude": self._longitude,
|
||||
"path_edited": self.path_edited,
|
||||
"shared": self.shared,
|
||||
"isphoto": self.isphoto,
|
||||
"ismovie": self.ismovie,
|
||||
"uti": self.uti,
|
||||
"burst": self.burst,
|
||||
"live_photo": self.live_photo,
|
||||
"path_live_photo": self.path_live_photo,
|
||||
"iscloudasset": self.iscloudasset,
|
||||
"incloud": self.incloud,
|
||||
"date_modified": date_modified_iso,
|
||||
}
|
||||
return yaml.dump(info, sort_keys=False)
|
||||
|
||||
def json(self):
|
||||
""" return JSON representation """
|
||||
|
||||
date_modified_iso = (
|
||||
self.date_modified.isoformat() if self.date_modified else None
|
||||
)
|
||||
|
||||
pic = {
|
||||
"uuid": self.uuid,
|
||||
"filename": self.filename,
|
||||
"original_filename": self.original_filename,
|
||||
"date": self.date.isoformat(),
|
||||
"description": self.description,
|
||||
"title": self.title,
|
||||
"keywords": self.keywords,
|
||||
"albums": self.albums,
|
||||
"persons": self.persons,
|
||||
"path": self.path,
|
||||
"ismissing": self.ismissing,
|
||||
"hasadjustments": self.hasadjustments,
|
||||
"external_edit": self.external_edit,
|
||||
"favorite": self.favorite,
|
||||
"hidden": self.hidden,
|
||||
"latitude": self._latitude,
|
||||
"longitude": self._longitude,
|
||||
"path_edited": self.path_edited,
|
||||
"shared": self.shared,
|
||||
"isphoto": self.isphoto,
|
||||
"ismovie": self.ismovie,
|
||||
"uti": self.uti,
|
||||
"burst": self.burst,
|
||||
"live_photo": self.live_photo,
|
||||
"path_live_photo": self.path_live_photo,
|
||||
"iscloudasset": self.iscloudasset,
|
||||
"incloud": self.incloud,
|
||||
"date_modified": date_modified_iso,
|
||||
}
|
||||
return json.dumps(pic)
|
||||
|
||||
# compare two PhotoInfo objects for equality
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
9
osxphotos/photoinfo/__init__.py
Normal file
9
osxphotos/photoinfo/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""
|
||||
PhotoInfo class
|
||||
Represents a single photo in the Photos library and provides access to the photo's attributes
|
||||
PhotosDB.photos() returns a list of PhotoInfo objects
|
||||
"""
|
||||
|
||||
from ._photoinfo_exifinfo import ExifInfo
|
||||
from ._photoinfo_export import ExportResults
|
||||
from .photoinfo import PhotoInfo
|
||||
94
osxphotos/photoinfo/_photoinfo_exifinfo.py
Normal file
94
osxphotos/photoinfo/_photoinfo_exifinfo.py
Normal file
@@ -0,0 +1,94 @@
|
||||
""" PhotoInfo methods to expose EXIF info from the library """
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .._constants import _PHOTOS_4_VERSION
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ExifInfo:
|
||||
""" EXIF info associated with a photo from the Photos library """
|
||||
|
||||
flash_fired: bool
|
||||
iso: int
|
||||
metering_mode: int
|
||||
sample_rate: int
|
||||
track_format: int
|
||||
white_balance: int
|
||||
aperture: float
|
||||
bit_rate: float
|
||||
duration: float
|
||||
exposure_bias: float
|
||||
focal_length: float
|
||||
fps: float
|
||||
latitude: float
|
||||
longitude: float
|
||||
shutter_speed: float
|
||||
camera_make: str
|
||||
camera_model: str
|
||||
codec: str
|
||||
lens_model: str
|
||||
|
||||
|
||||
@property
|
||||
def exif_info(self):
|
||||
""" Returns an ExifInfo object with the EXIF data for photo
|
||||
Note: the returned EXIF data is the data Photos stores in the database on import;
|
||||
ExifInfo does not provide access to the EXIF info in the actual image file
|
||||
Some or all of the fields may be None
|
||||
Only valid for Photos 5; on earlier database returns None
|
||||
"""
|
||||
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
logging.debug(f"exif_info not implemented for this database version")
|
||||
return None
|
||||
|
||||
try:
|
||||
exif = self._db._db_exifinfo_uuid[self.uuid]
|
||||
exif_info = ExifInfo(
|
||||
iso=exif["ZISO"],
|
||||
flash_fired=True if exif["ZFLASHFIRED"] == 1 else False,
|
||||
metering_mode=exif["ZMETERINGMODE"],
|
||||
sample_rate=exif["ZSAMPLERATE"],
|
||||
track_format=exif["ZTRACKFORMAT"],
|
||||
white_balance=exif["ZWHITEBALANCE"],
|
||||
aperture=exif["ZAPERTURE"],
|
||||
bit_rate=exif["ZBITRATE"],
|
||||
duration=exif["ZDURATION"],
|
||||
exposure_bias=exif["ZEXPOSUREBIAS"],
|
||||
focal_length=exif["ZFOCALLENGTH"],
|
||||
fps=exif["ZFPS"],
|
||||
latitude=exif["ZLATITUDE"],
|
||||
longitude=exif["ZLONGITUDE"],
|
||||
shutter_speed=exif["ZSHUTTERSPEED"],
|
||||
camera_make=exif["ZCAMERAMAKE"],
|
||||
camera_model=exif["ZCAMERAMODEL"],
|
||||
codec=exif["ZCODEC"],
|
||||
lens_model=exif["ZLENSMODEL"],
|
||||
)
|
||||
except KeyError:
|
||||
logging.debug(f"Could not find exif record for uuid {self.uuid}")
|
||||
exif_info = ExifInfo(
|
||||
iso=None,
|
||||
flash_fired=None,
|
||||
metering_mode=None,
|
||||
sample_rate=None,
|
||||
track_format=None,
|
||||
white_balance=None,
|
||||
aperture=None,
|
||||
bit_rate=None,
|
||||
duration=None,
|
||||
exposure_bias=None,
|
||||
focal_length=None,
|
||||
fps=None,
|
||||
latitude=None,
|
||||
longitude=None,
|
||||
shutter_speed=None,
|
||||
camera_make=None,
|
||||
camera_model=None,
|
||||
codec=None,
|
||||
lens_model=None,
|
||||
)
|
||||
|
||||
return exif_info
|
||||
33
osxphotos/photoinfo/_photoinfo_exiftool.py
Normal file
33
osxphotos/photoinfo/_photoinfo_exiftool.py
Normal file
@@ -0,0 +1,33 @@
|
||||
""" Implementation for PhotoInfo.exiftool property which returns ExifTool object for a photo """
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from ..exiftool import ExifTool, get_exiftool_path
|
||||
|
||||
@property
|
||||
def exiftool(self):
|
||||
""" Returns an ExifTool object for the photo
|
||||
requires that exiftool (https://exiftool.org/) be installed
|
||||
If exiftool not installed, logs warning and returns None
|
||||
If photo path is missing, returns None
|
||||
"""
|
||||
try:
|
||||
# return the memoized instance if it exists
|
||||
return self._exiftool
|
||||
except AttributeError:
|
||||
try:
|
||||
exiftool_path = get_exiftool_path()
|
||||
if self.path is not None and os.path.isfile(self.path):
|
||||
exiftool = ExifTool(self.path)
|
||||
else:
|
||||
exiftool = None
|
||||
logging.debug(f"exiftool: missing path {self.uuid}")
|
||||
except FileNotFoundError:
|
||||
# get_exiftool_path raises FileNotFoundError if exiftool not found
|
||||
exiftool = None
|
||||
logging.warning(f"exiftool not in path; download and install from https://exiftool.org/")
|
||||
|
||||
self._exiftool = exiftool
|
||||
return self._exiftool
|
||||
|
||||
999
osxphotos/photoinfo/_photoinfo_export.py
Normal file
999
osxphotos/photoinfo/_photoinfo_export.py
Normal file
@@ -0,0 +1,999 @@
|
||||
""" export methods for PhotoInfo """
|
||||
|
||||
# TODO: should this be its own PhotoExporter class?
|
||||
|
||||
import filecmp
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
from collections import namedtuple # pylint: disable=syntax-error
|
||||
|
||||
from mako.template import Template
|
||||
|
||||
from .._constants import (
|
||||
_MAX_IPTC_KEYWORD_LEN,
|
||||
_OSXPHOTOS_NONE_SENTINEL,
|
||||
_TEMPLATE_DIR,
|
||||
_UNKNOWN_PERSON,
|
||||
_XMP_TEMPLATE_NAME,
|
||||
)
|
||||
from ..exiftool import ExifTool
|
||||
from .._export_db import ExportDBNoOp
|
||||
from .._filecmp import cmp_file, file_sig
|
||||
from ..utils import (
|
||||
_copy_file,
|
||||
_export_photo_uuid_applescript,
|
||||
_hardlink_file,
|
||||
dd_to_dms_str,
|
||||
)
|
||||
|
||||
ExportResults = namedtuple(
|
||||
"ExportResults", ["exported", "new", "updated", "skipped", "exif_updated"]
|
||||
)
|
||||
|
||||
|
||||
def export(
|
||||
self,
|
||||
dest,
|
||||
*filename,
|
||||
edited=False,
|
||||
live_photo=False,
|
||||
raw_photo=False,
|
||||
export_as_hardlink=False,
|
||||
overwrite=False,
|
||||
increment=True,
|
||||
sidecar_json=False,
|
||||
sidecar_xmp=False,
|
||||
use_photos_export=False,
|
||||
timeout=120,
|
||||
exiftool=False,
|
||||
no_xattr=False,
|
||||
use_albums_as_keywords=False,
|
||||
use_persons_as_keywords=False,
|
||||
keyword_template=None,
|
||||
):
|
||||
""" export photo
|
||||
dest: must be valid destination path (or exception raised)
|
||||
filename: (optional): name of exported picture; if not provided, will use current filename
|
||||
**NOTE**: if provided, user must ensure file extension (suffix) is correct.
|
||||
For example, if photo is .CR2 file, edited image may be .jpeg.
|
||||
If you provide an extension different than what the actual file is,
|
||||
export will print a warning but will happily export the photo using the
|
||||
incorrect file extension. e.g. to get the extension of the edited photo,
|
||||
reference PhotoInfo.path_edited
|
||||
edited: (boolean, default=False); if True will export the edited version of the photo
|
||||
(or raise exception if no edited version)
|
||||
live_photo: (boolean, default=False); if True, will also export the associted .mov for live photos
|
||||
raw_photo: (boolean, default=False); if True, will also export the associted RAW photo
|
||||
export_as_hardlink: (boolean, default=False); if True, will hardlink files instead of copying them
|
||||
overwrite: (boolean, default=False); if True will overwrite files if they alreay exist
|
||||
increment: (boolean, default=True); if True, will increment file name until a non-existant name is found
|
||||
if overwrite=False and increment=False, export will fail if destination file already exists
|
||||
sidecar_json: (boolean, default = False); if True will also write a json sidecar with IPTC data in format readable by exiftool
|
||||
sidecar filename will be dest/filename.json
|
||||
sidecar_xmp: (boolean, default = False); if True will also write a XMP sidecar with IPTC data
|
||||
sidecar filename will be dest/filename.xmp
|
||||
use_photos_export: (boolean, default=False); if True will attempt to export photo via applescript interaction with Photos
|
||||
timeout: (int, default=120) timeout in seconds used with use_photos_export
|
||||
exiftool: (boolean, default = False); if True, will use exiftool to write metadata to export file
|
||||
no_xattr: (boolean, default = False); if True, exports file without preserving extended attributes
|
||||
returns list of full paths to the exported files
|
||||
use_albums_as_keywords: (boolean, default = False); if True, will include album names in keywords
|
||||
when exporting metadata with exiftool or sidecar
|
||||
use_persons_as_keywords: (boolean, default = False); if True, will include person names in keywords
|
||||
when exporting metadata with exiftool or sidecar
|
||||
keyword_template: (list of strings); list of template strings that will be rendered as used as keywords
|
||||
returns: list of photos exported
|
||||
"""
|
||||
|
||||
# Implementation note: calls export2 to actually do the work
|
||||
|
||||
results = self.export2(
|
||||
dest,
|
||||
*filename,
|
||||
edited=edited,
|
||||
live_photo=live_photo,
|
||||
raw_photo=raw_photo,
|
||||
export_as_hardlink=export_as_hardlink,
|
||||
overwrite=overwrite,
|
||||
increment=increment,
|
||||
sidecar_json=sidecar_json,
|
||||
sidecar_xmp=sidecar_xmp,
|
||||
use_photos_export=use_photos_export,
|
||||
timeout=timeout,
|
||||
exiftool=exiftool,
|
||||
no_xattr=no_xattr,
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
|
||||
return results.exported
|
||||
|
||||
|
||||
def export2(
|
||||
self,
|
||||
dest,
|
||||
*filename,
|
||||
edited=False,
|
||||
live_photo=False,
|
||||
raw_photo=False,
|
||||
export_as_hardlink=False,
|
||||
overwrite=False,
|
||||
increment=True,
|
||||
sidecar_json=False,
|
||||
sidecar_xmp=False,
|
||||
use_photos_export=False,
|
||||
timeout=120,
|
||||
exiftool=False,
|
||||
no_xattr=False,
|
||||
use_albums_as_keywords=False,
|
||||
use_persons_as_keywords=False,
|
||||
keyword_template=None,
|
||||
update=False,
|
||||
export_db=None,
|
||||
):
|
||||
""" export photo
|
||||
dest: must be valid destination path (or exception raised)
|
||||
filename: (optional): name of exported picture; if not provided, will use current filename
|
||||
**NOTE**: if provided, user must ensure file extension (suffix) is correct.
|
||||
For example, if photo is .CR2 file, edited image may be .jpeg.
|
||||
If you provide an extension different than what the actual file is,
|
||||
export will print a warning but will happily export the photo using the
|
||||
incorrect file extension. e.g. to get the extension of the edited photo,
|
||||
reference PhotoInfo.path_edited
|
||||
edited: (boolean, default=False); if True will export the edited version of the photo
|
||||
(or raise exception if no edited version)
|
||||
live_photo: (boolean, default=False); if True, will also export the associted .mov for live photos
|
||||
raw_photo: (boolean, default=False); if True, will also export the associted RAW photo
|
||||
export_as_hardlink: (boolean, default=False); if True, will hardlink files instead of copying them
|
||||
overwrite: (boolean, default=False); if True will overwrite files if they alreay exist
|
||||
increment: (boolean, default=True); if True, will increment file name until a non-existant name is found
|
||||
if overwrite=False and increment=False, export will fail if destination file already exists
|
||||
sidecar_json: (boolean, default = False); if True will also write a json sidecar with IPTC data in format readable by exiftool
|
||||
sidecar filename will be dest/filename.json
|
||||
sidecar_xmp: (boolean, default = False); if True will also write a XMP sidecar with IPTC data
|
||||
sidecar filename will be dest/filename.xmp
|
||||
use_photos_export: (boolean, default=False); if True will attempt to export photo via applescript interaction with Photos
|
||||
timeout: (int, default=120) timeout in seconds used with use_photos_export
|
||||
exiftool: (boolean, default = False); if True, will use exiftool to write metadata to export file
|
||||
no_xattr: (boolean, default = False); if True, exports file without preserving extended attributes
|
||||
returns list of full paths to the exported files
|
||||
use_albums_as_keywords: (boolean, default = False); if True, will include album names in keywords
|
||||
when exporting metadata with exiftool or sidecar
|
||||
use_persons_as_keywords: (boolean, default = False); if True, will include person names in keywords
|
||||
when exporting metadata with exiftool or sidecar
|
||||
keyword_template: (list of strings); list of template strings that will be rendered as used as keywords
|
||||
update: (boolean, default=False); if True export will run in update mode, that is, it will
|
||||
not export the photo if the current version already exists in the destination
|
||||
export_db: (ExportDB_ABC); instance of a class that conforms to ExportDB_ABC with methods
|
||||
for getting/setting data related to exported files to compare update state
|
||||
Returns: ExportResults namedtuple with fields: exported, new, updated, skipped
|
||||
where each field is a list of file paths
|
||||
"""
|
||||
|
||||
# if update, caller may pass function refs to get/set uuid for file being exported
|
||||
# and for setting/getting the PhotoInfo json info for an exported file
|
||||
if export_db is None:
|
||||
export_db = ExportDBNoOp()
|
||||
|
||||
# suffix to add to edited files
|
||||
# e.g. name will be filename_edited.jpg
|
||||
edited_identifier = "_edited"
|
||||
|
||||
# list of all files exported during this call to export
|
||||
exported_files = []
|
||||
|
||||
# list of new files during update
|
||||
update_new_files = []
|
||||
|
||||
# list of files that were updated
|
||||
update_updated_files = []
|
||||
|
||||
# list of all files skipped because they do not need to be updated (for use with update=True)
|
||||
update_skipped_files = []
|
||||
|
||||
# check edited and raise exception trying to export edited version of
|
||||
# photo that hasn't been edited
|
||||
if edited and not self.hasadjustments:
|
||||
raise ValueError(
|
||||
"Photo does not have adjustments, cannot export edited version"
|
||||
)
|
||||
|
||||
# check arguments and get destination path and filename (if provided)
|
||||
if filename and len(filename) > 2:
|
||||
raise TypeError(
|
||||
"Too many positional arguments. Should be at most two: destination, filename."
|
||||
)
|
||||
else:
|
||||
# verify destination is a valid path
|
||||
if dest is None:
|
||||
raise ValueError("Destination must not be None")
|
||||
elif not os.path.isdir(dest):
|
||||
raise FileNotFoundError("Invalid path passed to export")
|
||||
|
||||
if filename and len(filename) == 1:
|
||||
# if filename passed, use it
|
||||
fname = filename[0]
|
||||
else:
|
||||
# no filename provided so use the default
|
||||
# if edited file requested, use filename but add _edited
|
||||
# need to use file extension from edited file as Photos saves a jpeg once edited
|
||||
if edited and not use_photos_export:
|
||||
# verify we have a valid path_edited and use that to get filename
|
||||
if not self.path_edited:
|
||||
raise FileNotFoundError(
|
||||
"edited=True but path_edited is none; hasadjustments: "
|
||||
f" {self.hasadjustments}"
|
||||
)
|
||||
edited_name = pathlib.Path(self.path_edited).name
|
||||
edited_suffix = pathlib.Path(edited_name).suffix
|
||||
fname = (
|
||||
pathlib.Path(self.filename).stem + edited_identifier + edited_suffix
|
||||
)
|
||||
else:
|
||||
fname = self.filename
|
||||
|
||||
# check destination path
|
||||
dest = pathlib.Path(dest)
|
||||
fname = pathlib.Path(fname)
|
||||
dest = dest / fname
|
||||
|
||||
# check extension of destination
|
||||
if edited and self.path_edited is not None:
|
||||
# use suffix from edited file
|
||||
actual_suffix = pathlib.Path(self.path_edited).suffix
|
||||
elif edited:
|
||||
# use .jpeg as that's probably correct
|
||||
# if edited and path_edited is None, will raise FileNotFoundError below
|
||||
# unless use_photos_export is True
|
||||
actual_suffix = ".jpeg"
|
||||
else:
|
||||
# use suffix from the non-edited file
|
||||
actual_suffix = pathlib.Path(self.filename).suffix
|
||||
|
||||
# warn if suffixes don't match but ignore .JPG / .jpeg as
|
||||
# Photos often converts .JPG to .jpeg
|
||||
suffixes = sorted([x.lower() for x in [dest.suffix, actual_suffix]])
|
||||
if dest.suffix.lower() != actual_suffix.lower() and suffixes != [".jpeg", ".jpg"]:
|
||||
logging.warning(
|
||||
f"Invalid destination suffix: {dest.suffix}, should be {actual_suffix}"
|
||||
)
|
||||
|
||||
# check to see if file exists and if so, add (1), (2), etc until we find one that works
|
||||
# Photos checks the stem and adds (1), (2), etc which avoids collision with sidecars
|
||||
# e.g. exporting sidecar for file1.png and file1.jpeg
|
||||
# if file1.png exists and exporting file1.jpeg,
|
||||
# dest will be file1 (1).jpeg even though file1.jpeg doesn't exist to prevent sidecar collision
|
||||
if not update and increment and not overwrite:
|
||||
count = 1
|
||||
glob_str = str(dest.parent / f"{dest.stem}*")
|
||||
dest_files = glob.glob(glob_str)
|
||||
dest_files = [pathlib.Path(f).stem for f in dest_files]
|
||||
dest_new = dest.stem
|
||||
while dest_new in dest_files:
|
||||
dest_new = f"{dest.stem} ({count})"
|
||||
count += 1
|
||||
dest = dest.parent / f"{dest_new}{dest.suffix}"
|
||||
|
||||
# TODO: need way to check if DB is missing, try to find the right photo anyway by seeing if they're the same and then updating
|
||||
# move the checks into "if not use_photos_export" block below
|
||||
# if use_photos_export is True then we'll export whether the destination exists or not
|
||||
|
||||
# if overwrite==False and increment==False, export should fail if file exists
|
||||
if dest.exists() and not update and not overwrite and not increment:
|
||||
raise FileExistsError(
|
||||
f"destination exists ({dest}); overwrite={overwrite}, increment={increment}"
|
||||
)
|
||||
|
||||
if not use_photos_export:
|
||||
# find the source file on disk and export
|
||||
# get path to source file and verify it's not None and is valid file
|
||||
# TODO: how to handle ismissing or not hasadjustments and edited=True cases?
|
||||
if edited:
|
||||
if self.path_edited is not None:
|
||||
src = self.path_edited
|
||||
else:
|
||||
raise FileNotFoundError(
|
||||
f"Cannot export edited photo if path_edited is None"
|
||||
)
|
||||
else:
|
||||
if self.ismissing:
|
||||
logging.debug(
|
||||
f"Attempting to export photo with ismissing=True: path = {self.path}"
|
||||
)
|
||||
|
||||
if self.path is not None:
|
||||
src = self.path
|
||||
else:
|
||||
raise FileNotFoundError("Cannot export photo if path is None")
|
||||
|
||||
if not os.path.isfile(src):
|
||||
raise FileNotFoundError(f"{src} does not appear to exist")
|
||||
|
||||
logging.debug(
|
||||
f"exporting {src} to {dest}, overwrite={overwrite}, increment={increment}, dest exists: {dest.exists()}"
|
||||
)
|
||||
|
||||
# found source now try to find right destination
|
||||
if update and dest.exists():
|
||||
# destination exists, check to see if destination is the right UUID
|
||||
dest_uuid = export_db.get_uuid_for_file(dest)
|
||||
if dest_uuid is None and filecmp.cmp(src, dest):
|
||||
# might be exporting into a pre-ExportDB folder or the DB got deleted
|
||||
logging.debug(
|
||||
f"Found matching file with blank uuid: {self.uuid}, {dest}"
|
||||
)
|
||||
dest_uuid = self.uuid
|
||||
export_db.set_uuid_for_file(dest, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest, file_sig(dest))
|
||||
export_db.set_stat_exif_for_file(dest, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest, None)
|
||||
if dest_uuid != self.uuid:
|
||||
# not the right file, find the right one
|
||||
logging.debug(
|
||||
f"Need to find right photo: uuid={self.uuid}, dest={dest_uuid}, dest={dest}, path={self.path}"
|
||||
)
|
||||
count = 1
|
||||
glob_str = str(dest.parent / f"{dest.stem} (*{dest.suffix}")
|
||||
dest_files = glob.glob(glob_str)
|
||||
found_match = False
|
||||
for file_ in dest_files:
|
||||
dest_uuid = export_db.get_uuid_for_file(file_)
|
||||
if dest_uuid == self.uuid:
|
||||
logging.debug(
|
||||
f"Found matching file for uuid: {dest_uuid}, {file_}"
|
||||
)
|
||||
dest = pathlib.Path(file_)
|
||||
found_match = True
|
||||
break
|
||||
elif dest_uuid is None and filecmp.cmp(src, file_):
|
||||
# files match, update the UUID
|
||||
logging.debug(
|
||||
f"Found matching file with blank uuid: {self.uuid}, {file_}"
|
||||
)
|
||||
dest = pathlib.Path(file_)
|
||||
found_match = True
|
||||
export_db.set_uuid_for_file(file_, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest, file_sig(dest))
|
||||
export_db.set_stat_exif_for_file(dest, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest, None)
|
||||
break
|
||||
|
||||
if not found_match:
|
||||
logging.debug(
|
||||
f"Didn't find destination match for uuid {self.uuid} {dest}"
|
||||
)
|
||||
|
||||
# increment the destination file
|
||||
count = 1
|
||||
glob_str = str(dest.parent / f"{dest.stem}*")
|
||||
dest_files = glob.glob(glob_str)
|
||||
dest_files = [pathlib.Path(f).stem for f in dest_files]
|
||||
dest_new = dest.stem
|
||||
while dest_new in dest_files:
|
||||
dest_new = f"{dest.stem} ({count})"
|
||||
count += 1
|
||||
dest = dest.parent / f"{dest_new}{dest.suffix}"
|
||||
logging.debug(f"New destination = {dest}, uuid = {self.uuid}")
|
||||
|
||||
# export the dest file
|
||||
results = self._export_photo(
|
||||
src,
|
||||
dest,
|
||||
update,
|
||||
export_db,
|
||||
overwrite,
|
||||
no_xattr,
|
||||
export_as_hardlink,
|
||||
exiftool,
|
||||
)
|
||||
exported_files = results.exported
|
||||
update_new_files = results.new
|
||||
update_updated_files = results.updated
|
||||
update_skipped_files = results.skipped
|
||||
|
||||
# copy live photo associated .mov if requested
|
||||
if live_photo and self.live_photo:
|
||||
live_name = dest.parent / f"{dest.stem}.mov"
|
||||
src_live = self.path_live_photo
|
||||
|
||||
if src_live is not None:
|
||||
logging.debug(
|
||||
f"Exporting live photo video of {filename} as {live_name.name}"
|
||||
)
|
||||
results = self._export_photo(
|
||||
src_live,
|
||||
live_name,
|
||||
update,
|
||||
export_db,
|
||||
overwrite,
|
||||
no_xattr,
|
||||
export_as_hardlink,
|
||||
exiftool,
|
||||
)
|
||||
exported_files.extend(results.exported)
|
||||
update_new_files.extend(results.new)
|
||||
update_updated_files.extend(results.updated)
|
||||
update_skipped_files.extend(results.skipped)
|
||||
else:
|
||||
logging.debug(f"Skipping missing live movie for {filename}")
|
||||
|
||||
# copy associated RAW image if requested
|
||||
if raw_photo and self.has_raw:
|
||||
raw_path = pathlib.Path(self.path_raw)
|
||||
raw_ext = raw_path.suffix
|
||||
raw_name = dest.parent / f"{dest.stem}{raw_ext}"
|
||||
if raw_path is not None:
|
||||
logging.debug(f"Exporting RAW photo of {filename} as {raw_name.name}")
|
||||
results = self._export_photo(
|
||||
raw_path,
|
||||
raw_name,
|
||||
update,
|
||||
export_db,
|
||||
overwrite,
|
||||
no_xattr,
|
||||
export_as_hardlink,
|
||||
exiftool,
|
||||
)
|
||||
exported_files.extend(results.exported)
|
||||
update_new_files.extend(results.new)
|
||||
update_updated_files.extend(results.updated)
|
||||
update_skipped_files.extend(results.skipped)
|
||||
else:
|
||||
logging.debug(f"Skipping missing RAW photo for {filename}")
|
||||
else:
|
||||
# use_photos_export
|
||||
exported = None
|
||||
# export live_photo .mov file?
|
||||
live_photo = True if live_photo and self.live_photo else False
|
||||
if edited:
|
||||
# export edited version and not original
|
||||
if filename:
|
||||
# use filename stem provided
|
||||
filestem = dest.stem
|
||||
else:
|
||||
# didn't get passed a filename, add _edited
|
||||
filestem = f"{dest.stem}_edited"
|
||||
dest = dest.parent / f"{filestem}.jpeg"
|
||||
|
||||
exported = _export_photo_uuid_applescript(
|
||||
self.uuid,
|
||||
dest.parent,
|
||||
filestem=filestem,
|
||||
original=False,
|
||||
edited=True,
|
||||
live_photo=live_photo,
|
||||
timeout=timeout,
|
||||
burst=self.burst,
|
||||
)
|
||||
else:
|
||||
# export original version and not edited
|
||||
filestem = dest.stem
|
||||
exported = _export_photo_uuid_applescript(
|
||||
self.uuid,
|
||||
dest.parent,
|
||||
filestem=filestem,
|
||||
original=True,
|
||||
edited=False,
|
||||
live_photo=live_photo,
|
||||
timeout=timeout,
|
||||
burst=self.burst,
|
||||
)
|
||||
|
||||
if exported is not None:
|
||||
exported_files.extend(exported)
|
||||
else:
|
||||
logging.warning(
|
||||
f"Error exporting photo {self.uuid} to {dest} with use_photos_export"
|
||||
)
|
||||
|
||||
# export metadata
|
||||
info = export_db.get_info_for_uuid(self.uuid)
|
||||
|
||||
if sidecar_json:
|
||||
logging.debug("writing exiftool_json_sidecar")
|
||||
sidecar_filename = dest.parent / pathlib.Path(f"{dest.stem}.json")
|
||||
sidecar_str = self._exiftool_json_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
try:
|
||||
self._write_sidecar(sidecar_filename, sidecar_str)
|
||||
except Exception as e:
|
||||
logging.warning(f"Error writing json sidecar to {sidecar_filename}")
|
||||
raise e
|
||||
|
||||
if sidecar_xmp:
|
||||
logging.debug("writing xmp_sidecar")
|
||||
sidecar_filename = dest.parent / pathlib.Path(f"{dest.stem}.xmp")
|
||||
sidecar_str = self._xmp_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
try:
|
||||
self._write_sidecar(sidecar_filename, sidecar_str)
|
||||
except Exception as e:
|
||||
logging.warning(f"Error writing xmp sidecar to {sidecar_filename}")
|
||||
raise e
|
||||
|
||||
# if exiftool, write the metadata
|
||||
if update:
|
||||
exif_files = update_new_files + update_updated_files + update_skipped_files
|
||||
else:
|
||||
exif_files = exported_files
|
||||
|
||||
exif_files_updated = []
|
||||
if exiftool and update and exif_files:
|
||||
for exported_file in exif_files:
|
||||
logging.debug(f"checking exif for {exported_file}")
|
||||
files_are_different = False
|
||||
old_data = export_db.get_exifdata_for_file(exported_file)
|
||||
if old_data is not None:
|
||||
old_data = json.loads(old_data)[0]
|
||||
current_data = json.loads(
|
||||
self._exiftool_json_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
)[0]
|
||||
if old_data != current_data:
|
||||
files_are_different = True
|
||||
|
||||
if old_data is None or files_are_different:
|
||||
# didn't have old data, assume we need to write it
|
||||
# or files were different
|
||||
logging.debug(f"No exifdata for {exported_file}, writing it")
|
||||
self._write_exif_data(
|
||||
exported_file,
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
export_db.set_exifdata_for_file(
|
||||
exported_file,
|
||||
self._exiftool_json_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
),
|
||||
)
|
||||
export_db.set_stat_exif_for_file(exported_file, file_sig(exported_file))
|
||||
exif_files_updated.append(exported_file)
|
||||
elif exiftool and exif_files:
|
||||
for exported_file in exif_files:
|
||||
logging.debug(f"Writing exif data to {exported_file}")
|
||||
self._write_exif_data(
|
||||
exported_file,
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
export_db.set_exifdata_for_file(
|
||||
exported_file,
|
||||
self._exiftool_json_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
),
|
||||
)
|
||||
export_db.set_stat_exif_for_file(exported_file, file_sig(exported_file))
|
||||
exif_files_updated.append(exported_file)
|
||||
|
||||
return ExportResults(
|
||||
exported_files,
|
||||
update_new_files,
|
||||
update_updated_files,
|
||||
update_skipped_files,
|
||||
exif_files_updated,
|
||||
)
|
||||
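# A minimal usage sketch for the export() method above. The helper name,
# the photo argument, and the destination path are hypothetical; only
# parameters documented in the export() docstring are used.
def _example_export_photo(photo, dest="/tmp/osxphotos_export"):
    """ Export one photo with a JSON sidecar, skipping files already current at dest """
    return photo.export(
        dest,
        sidecar_json=True,  # also write an exiftool-readable JSON sidecar
        update=True,        # skip photos already exported and unchanged
    )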
|
||||
|
||||
def _export_photo(
|
||||
self,
|
||||
src,
|
||||
dest,
|
||||
update,
|
||||
export_db,
|
||||
overwrite,
|
||||
no_xattr,
|
||||
export_as_hardlink,
|
||||
exiftool,
|
||||
):
|
||||
""" Helper function for export()
|
||||
Does the actual copy or hardlink taking the appropriate
|
||||
action depending on update, overwrite
|
||||
Assumes destination is the right destination (e.g. UUID matches)
|
||||
sets UUID and JSON info for exported file using set_uuid_for_file, set_info_for_uuid
|
||||
src: src path (string)
|
||||
dest: dest path (pathlib.Path)
|
||||
update: bool
|
||||
export_db: instance of ExportDB that conforms to ExportDB_ABC interface
|
||||
overwrite: bool
|
||||
no_xattr: don't copy extended attributes
|
||||
export_as_hardlink: bool
|
||||
exiftool: bool
|
||||
Returns: ExportResults
|
||||
"""
|
||||
|
||||
exported_files = []
|
||||
update_updated_files = []
|
||||
update_new_files = []
|
||||
update_skipped_files = []
|
||||
|
||||
dest_str = str(dest)
|
||||
dest_exists = dest.exists()
|
||||
if export_as_hardlink:
|
||||
# use hardlink instead of copy
|
||||
if not update:
|
||||
# not update, do the hardlink
|
||||
if overwrite and dest.exists():
|
||||
# need to remove the destination first
|
||||
dest.unlink()
|
||||
logging.debug(f"Not update: export_as_hardlink linking file {src} {dest}")
|
||||
_hardlink_file(src, dest)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
exported_files.append(dest_str)
|
||||
elif dest_exists and dest.samefile(src):
|
||||
# update, hardlink and it already points to the right file, do nothing
|
||||
logging.debug(
|
||||
f"Update: skipping samefile with export_as_hardlink {src} {dest}"
|
||||
)
|
||||
update_skipped_files.append(dest_str)
|
||||
elif dest_exists:
|
||||
# update, not the same file (e.g. user may not have used export_as_hardlink last time it was run)
|
||||
logging.debug(
|
||||
f"Update: removing existing file prior to export_as_hardlink {src} {dest}"
|
||||
)
|
||||
dest.unlink()
|
||||
_hardlink_file(src, dest)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
update_updated_files.append(dest_str)
|
||||
exported_files.append(dest_str)
|
||||
else:
|
||||
# update, hardlink, destination doesn't exist (new file)
|
||||
logging.debug(
|
||||
f"Update: exporting new file with export_as_hardlink {src} {dest}"
|
||||
)
|
||||
_hardlink_file(src, dest)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
exported_files.append(dest_str)
|
||||
update_new_files.append(dest_str)
|
||||
else:
|
||||
if not update:
|
||||
# not update, do the copy
|
||||
if overwrite and dest.exists():
|
||||
# need to remove the destination first
|
||||
dest.unlink()
|
||||
logging.debug(f"Not update: copying file {src} {dest}")
|
||||
_copy_file(src, dest_str, norsrc=no_xattr)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
exported_files.append(dest_str)
|
||||
# elif dest_exists and not exiftool and cmp_file(dest_str, export_db.get_stat_orig_for_file(dest_str)):
|
||||
elif (
|
||||
dest_exists
|
||||
and not exiftool
|
||||
and filecmp.cmp(src, dest)
|
||||
and not dest.samefile(src)
|
||||
):
|
||||
# destination exists but is identical
|
||||
logging.debug(f"Update: skipping identifical original files {src} {dest}")
|
||||
# call set_stat because code can reach this spot if no export DB but exporting a RAW or live photo
|
||||
# potentially re-writes the data in the database but ensures database is complete
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
update_skipped_files.append(dest_str)
|
||||
elif (
|
||||
dest_exists
|
||||
and exiftool
|
||||
and cmp_file(dest_str, export_db.get_stat_exif_for_file(dest_str))
|
||||
and not dest.samefile(src)
|
||||
):
|
||||
# destination exists but is identical
|
||||
logging.debug(f"Update: skipping identifical exiftool files {src} {dest}")
|
||||
update_skipped_files.append(dest_str)
|
||||
elif dest_exists:
|
||||
# destination exists but is different or is a hardlink
|
||||
logging.debug(f"Update: removing existing file prior to copy {src} {dest}")
|
||||
stat_src = os.stat(src)
|
||||
stat_dest = os.stat(dest)
|
||||
dest.unlink()
|
||||
_copy_file(src, dest_str, norsrc=no_xattr)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
exported_files.append(dest_str)
|
||||
update_updated_files.append(dest_str)
|
||||
else:
|
||||
# destination doesn't exist, copy the file
|
||||
logging.debug(f"Update: copying new file {src} {dest}")
|
||||
_copy_file(src, dest_str, norsrc=no_xattr)
|
||||
export_db.set_uuid_for_file(dest_str, self.uuid)
|
||||
export_db.set_info_for_uuid(self.uuid, self.json())
|
||||
export_db.set_stat_orig_for_file(dest_str, file_sig(dest_str))
|
||||
export_db.set_stat_exif_for_file(dest_str, (None, None, None))
|
||||
export_db.set_exifdata_for_file(dest_str, None)
|
||||
exported_files.append(dest_str)
|
||||
update_new_files.append(dest_str)
|
||||
|
||||
return ExportResults(
|
||||
exported_files, update_new_files, update_updated_files, update_skipped_files, []
|
||||
)
|
||||
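# A small illustrative helper (hypothetical name) showing how the first four
# ExportResults fields returned by _export_photo() above relate: in update
# mode, every exported path also appears in either the new or updated list.
def _summarize_export_results(results):
    """ Return counts for the exported/new/updated/skipped fields """
    exported, new, updated, skipped = results[:4]
    return {
        "exported": len(exported),  # files copied or hardlinked this run
        "new": len(new),            # update mode: did not previously exist at dest
        "updated": len(updated),    # update mode: replaced because they changed
        "skipped": len(skipped),    # update mode: already current, left untouched
    }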
|
||||
|
||||
def _write_exif_data(
|
||||
self,
|
||||
filepath,
|
||||
use_albums_as_keywords=False,
|
||||
use_persons_as_keywords=False,
|
||||
keyword_template=None,
|
||||
):
|
||||
""" write exif data to image file at filepath
|
||||
filepath: full path to the image file """
|
||||
if not os.path.exists(filepath):
|
||||
raise FileNotFoundError(f"Could not find file {filepath}")
|
||||
exiftool = ExifTool(filepath)
|
||||
exif_info = json.loads(
|
||||
self._exiftool_json_sidecar(
|
||||
use_albums_as_keywords=use_albums_as_keywords,
|
||||
use_persons_as_keywords=use_persons_as_keywords,
|
||||
keyword_template=keyword_template,
|
||||
)
|
||||
)[0]
|
||||
for exiftag, val in exif_info.items():
|
||||
if isinstance(val, list):
|
||||
# more than one value: set the first, then add the remaining values
|
||||
exiftool.setvalue(exiftag, val.pop(0))
|
||||
if val:
|
||||
# add any remaining items
|
||||
exiftool.addvalues(exiftag, *val)
|
||||
else:
|
||||
exiftool.setvalue(exiftag, val)
|
||||
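# Illustrative only: export() above calls _write_exif_data() on each exported
# file when exiftool=True; a standalone equivalent for a single file (the
# helper name and argument are hypothetical).
def _example_write_exif(photo, exported_file):
    """ Write the photo's metadata into an already-exported image file """
    photo._write_exif_data(exported_file, use_persons_as_keywords=True)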
|
||||
|
||||
def _exiftool_json_sidecar(
|
||||
self,
|
||||
use_albums_as_keywords=False,
|
||||
use_persons_as_keywords=False,
|
||||
keyword_template=None,
|
||||
):
|
||||
""" return json string of EXIF details in exiftool sidecar format
|
||||
Does not include all the EXIF fields as those are likely already in the image
|
||||
use_albums_as_keywords: treat album names as keywords
|
||||
use_persons_as_keywords: treat person names as keywords
|
||||
keyword_template: (list of strings); list of template strings to render as keywords
|
||||
Exports the following:
|
||||
FileName
|
||||
ImageDescription
|
||||
Description
|
||||
Title
|
||||
TagsList
|
||||
Keywords (may include album name, person name, or template)
|
||||
Subject
|
||||
PersonInImage
|
||||
GPSLatitude, GPSLongitude
|
||||
GPSPosition
|
||||
GPSLatitudeRef, GPSLongitudeRef
|
||||
DateTimeOriginal
|
||||
OffsetTimeOriginal
|
||||
ModifyDate """
|
||||
|
||||
exif = {}
|
||||
exif["_CreatedBy"] = "osxphotos, https://github.com/RhetTbull/osxphotos"
|
||||
|
||||
if self.description:
|
||||
exif["EXIF:ImageDescription"] = self.description
|
||||
exif["XMP:Description"] = self.description
|
||||
|
||||
if self.title:
|
||||
exif["XMP:Title"] = self.title
|
||||
|
||||
keyword_list = []
|
||||
if self.keywords:
|
||||
keyword_list.extend(self.keywords)
|
||||
|
||||
person_list = []
|
||||
if self.persons:
|
||||
# filter out _UNKNOWN_PERSON
|
||||
person_list = sorted([p for p in self.persons if p != _UNKNOWN_PERSON])
|
||||
|
||||
if use_persons_as_keywords and person_list:
|
||||
keyword_list.extend(sorted(person_list))
|
||||
|
||||
if use_albums_as_keywords and self.albums:
|
||||
keyword_list.extend(sorted(self.albums))
|
||||
|
||||
if keyword_template:
|
||||
rendered_keywords = []
|
||||
for template_str in keyword_template:
|
||||
rendered, unmatched = self.render_template(
|
||||
template_str, none_str=_OSXPHOTOS_NONE_SENTINEL, path_sep="/"
|
||||
)
|
||||
if unmatched:
|
||||
logging.warning(
|
||||
f"Unmatched template substitution for template: {template_str} {unmatched}"
|
||||
)
|
||||
rendered_keywords.extend(rendered)
|
||||
|
||||
# filter out any template values that didn't match by looking for sentinel
|
||||
rendered_keywords = [
|
||||
keyword
|
||||
for keyword in sorted(rendered_keywords)
|
||||
if _OSXPHOTOS_NONE_SENTINEL not in keyword
|
||||
]
|
||||
|
||||
# check to see if any keywords too long
|
||||
long_keywords = [
|
||||
long_str
|
||||
for long_str in rendered_keywords
|
||||
if len(long_str) > _MAX_IPTC_KEYWORD_LEN
|
||||
]
|
||||
if long_keywords:
|
||||
logging.warning(
|
||||
f"Some keywords exceed max IPTC Keyword length of {_MAX_IPTC_KEYWORD_LEN}: {long_keywords}"
|
||||
)
|
||||
|
||||
keyword_list.extend(rendered_keywords)
|
||||
|
||||
if keyword_list:
|
||||
exif["XMP:TagsList"] = exif["IPTC:Keywords"] = keyword_list
|
||||
|
||||
if person_list:
|
||||
exif["XMP:PersonInImage"] = person_list
|
||||
|
||||
if self.keywords or person_list:
|
||||
# Photos puts both keywords and persons in Subject when using "Export IPTC as XMP"
|
||||
# use only Photos' own keywords (plus person names) for Subject, not album/template keywords
|
||||
exif["XMP:Subject"] = list(self.keywords) + person_list
|
||||
|
||||
# if self.favorite():
|
||||
# exif["Rating"] = 5
|
||||
|
||||
(lat, lon) = self.location
|
||||
if lat is not None and lon is not None:
|
||||
lat_str, lon_str = dd_to_dms_str(lat, lon)
|
||||
exif["EXIF:GPSLatitude"] = lat_str
|
||||
exif["EXIF:GPSLongitude"] = lon_str
|
||||
exif["Composite:GPSPosition"] = f"{lat_str}, {lon_str}"
|
||||
lat_ref = "North" if lat >= 0 else "South"
|
||||
lon_ref = "East" if lon >= 0 else "West"
|
||||
exif["EXIF:GPSLatitudeRef"] = lat_ref
|
||||
exif["EXIF:GPSLongitudeRef"] = lon_ref
|
||||
|
||||
# process date/time and timezone offset
|
||||
date = self.date
|
||||
# exiftool expects date format like "2015:01:18 12:00:00"
|
||||
datetimeoriginal = date.strftime("%Y:%m:%d %H:%M:%S")
|
||||
offsettime = date.strftime("%z")
|
||||
# find timezone offset in format "-04:00"
|
||||
offset = re.findall(r"([+-]?)([\d]{2})([\d]{2})", offsettime)
|
||||
offset = offset[0] # findall returns list of tuples
|
||||
offsettime = f"{offset[0]}{offset[1]}:{offset[2]}"
|
||||
exif["EXIF:DateTimeOriginal"] = datetimeoriginal
|
||||
exif["EXIF:OffsetTimeOriginal"] = offsettime
|
||||
|
||||
if self.date_modified is not None:
|
||||
exif["EXIF:ModifyDate"] = self.date_modified.strftime("%Y:%m:%d %H:%M:%S")
|
||||
|
||||
json_str = json.dumps([exif])
|
||||
return json_str
|
||||
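# Illustrative only: _exiftool_json_sidecar() returns a JSON string wrapping a
# single dict in a list (exiftool's sidecar convention), so callers unwrap it
# the way _write_exif_data() does above. The helper name is hypothetical.
def _example_sidecar_dict(photo):
    """ Return the sidecar metadata as a plain dict """
    return json.loads(photo._exiftool_json_sidecar())[0]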
|
||||
|
||||
def _xmp_sidecar(
|
||||
self,
|
||||
use_albums_as_keywords=False,
|
||||
use_persons_as_keywords=False,
|
||||
keyword_template=None,
|
||||
):
|
||||
""" returns string for XMP sidecar
|
||||
use_albums_as_keywords: treat album names as keywords
|
||||
use_persons_as_keywords: treat person names as keywords
|
||||
keyword_template: (list of strings); list of template strings to render as keywords """
|
||||
|
||||
# TODO: add additional fields to XMP file?
|
||||
|
||||
xmp_template = Template(filename=os.path.join(_TEMPLATE_DIR, _XMP_TEMPLATE_NAME))
|
||||
|
||||
keyword_list = []
|
||||
if self.keywords:
|
||||
keyword_list.extend(self.keywords)
|
||||
|
||||
# TODO: keyword handling in this and _exiftool_json_sidecar is
|
||||
# good candidate for pulling out into a shared function
|
||||
|
||||
person_list = []
|
||||
if self.persons:
|
||||
# filter out _UNKNOWN_PERSON
|
||||
person_list = [p for p in self.persons if p != _UNKNOWN_PERSON]
|
||||
|
||||
if use_persons_as_keywords and person_list:
|
||||
keyword_list.extend(person_list)
|
||||
|
||||
if use_albums_as_keywords and self.albums:
|
||||
keyword_list.extend(self.albums)
|
||||
|
||||
if keyword_template:
|
||||
rendered_keywords = []
|
||||
for template_str in keyword_template:
|
||||
rendered, unmatched = self.render_template(
|
||||
template_str, none_str=_OSXPHOTOS_NONE_SENTINEL, path_sep="/"
|
||||
)
|
||||
if unmatched:
|
||||
logging.warning(
|
||||
f"Unmatched template substitution for template: {template_str} {unmatched}"
|
||||
)
|
||||
rendered_keywords.extend(rendered)
|
||||
|
||||
# filter out any template values that didn't match by looking for sentinel
|
||||
rendered_keywords = [
|
||||
keyword
|
||||
for keyword in rendered_keywords
|
||||
if _OSXPHOTOS_NONE_SENTINEL not in keyword
|
||||
]
|
||||
|
||||
# check to see if any keywords too long
|
||||
long_keywords = [
|
||||
long_str
|
||||
for long_str in rendered_keywords
|
||||
if len(long_str) > _MAX_IPTC_KEYWORD_LEN
|
||||
]
|
||||
if long_keywords:
|
||||
logging.warning(
|
||||
f"Some keywords exceed max IPTC Keyword length of {_MAX_IPTC_KEYWORD_LEN}: {long_keywords}"
|
||||
)
|
||||
|
||||
keyword_list.extend(rendered_keywords)
|
||||
|
||||
subject_list = []
|
||||
if self.keywords or person_list:
|
||||
# Photos puts both keywords and persons in Subject when using "Export IPTC as XMP"
|
||||
subject_list = list(self.keywords) + person_list
|
||||
|
||||
xmp_str = xmp_template.render(
|
||||
photo=self, keywords=keyword_list, persons=person_list, subjects=subject_list
|
||||
)
|
||||
|
||||
# remove extra lines that mako inserts from template
|
||||
xmp_str = "\n".join([line for line in xmp_str.split("\n") if line.strip() != ""])
|
||||
return xmp_str
|
||||
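# Illustrative only: export() above pairs _xmp_sidecar() with _write_sidecar()
# to place an .xmp file next to each exported image; a standalone equivalent
# (the helper name and argument are hypothetical).
def _example_write_xmp_sidecar(photo, exported_path):
    """ Write an XMP sidecar next to exported_path and return its path """
    dest = pathlib.Path(exported_path)
    sidecar = dest.parent / f"{dest.stem}.xmp"
    photo._write_sidecar(sidecar, photo._xmp_sidecar())
    return sidecar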
|
||||
|
||||
def _write_sidecar(self, filename, sidecar_str):
|
||||
""" write sidecar_str to filename
|
||||
used for exporting sidecar info """
|
||||
if filename is None or sidecar_str is None:
raise ValueError(
f"filename {filename} and sidecar_str {sidecar_str} must not be None"
)
|
||||
|
||||
# TODO: catch exception?
|
||||
with open(filename, "w") as f:
    f.write(sidecar_str)
|
||||
93
osxphotos/photoinfo/_photoinfo_searchinfo.py
Normal file
@@ -0,0 +1,93 @@
|
||||
""" Methods and class for PhotoInfo exposing SearchInfo data such as labels
|
||||
Adds the following properties to PhotoInfo (valid only for Photos 5):
|
||||
search_info: returns a SearchInfo object
|
||||
labels: returns list of labels
|
||||
labels_normalized: returns list of normalized labels
|
||||
"""
|
||||
|
||||
from .._constants import _PHOTOS_4_VERSION, SEARCH_CATEGORY_LABEL
|
||||
|
||||
|
||||
@property
|
||||
def search_info(self):
|
||||
""" returns SearchInfo object for photo
|
||||
only valid on Photos 5, on older libraries, returns None
|
||||
"""
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
return None
|
||||
|
||||
# memoize SearchInfo object
|
||||
try:
|
||||
return self._search_info
|
||||
except AttributeError:
|
||||
self._search_info = SearchInfo(self)
|
||||
return self._search_info
|
||||
|
||||
|
||||
@property
|
||||
def labels(self):
|
||||
""" returns list of labels applied to photo by Photos image categorization
|
||||
only valid on Photos 5, on older libraries returns empty list
|
||||
"""
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
return []
|
||||
|
||||
return self.search_info.labels
|
||||
|
||||
|
||||
@property
|
||||
def labels_normalized(self):
|
||||
""" returns normalized list of labels applied to photo by Photos image categorization
|
||||
only valid on Photos 5, on older libraries returns empty list
|
||||
"""
|
||||
if self._db._db_version <= _PHOTOS_4_VERSION:
|
||||
return []
|
||||
|
||||
return self.search_info.labels_normalized
|
||||
|
||||
|
||||
class SearchInfo:
|
||||
""" Info about search terms such as machine learning labels that Photos knows about a photo """
|
||||
|
||||
def __init__(self, photo):
|
||||
""" photo: PhotoInfo object """
|
||||
|
||||
if photo._db._db_version <= _PHOTOS_4_VERSION:
|
||||
raise NotImplementedError(
|
||||
f"search info not implemented for this database version"
|
||||
)
|
||||
|
||||
self._photo = photo
|
||||
self.uuid = photo.uuid
|
||||
try:
|
||||
# get search info for this UUID
|
||||
# there might not be any search info data (e.g. if photo was missing or photoanalysisd has not run yet)
|
||||
self._db_searchinfo = photo._db._db_searchinfo_uuid[self.uuid]
|
||||
except KeyError:
|
||||
self._db_searchinfo = None
|
||||
|
||||
@property
|
||||
def labels(self):
|
||||
""" return list of labels associated with Photo """
|
||||
if self._db_searchinfo:
|
||||
labels = [
|
||||
rec["content_string"]
|
||||
for rec in self._db_searchinfo
|
||||
if rec["category"] == SEARCH_CATEGORY_LABEL
|
||||
]
|
||||
else:
|
||||
labels = []
|
||||
return labels
|
||||
|
||||
@property
|
||||
def labels_normalized(self):
|
||||
""" return list of normalized labels associated with Photo """
|
||||
if self._db_searchinfo:
|
||||
labels = [
|
||||
rec["normalized_string"]
|
||||
for rec in self._db_searchinfo
|
||||
if rec["category"] == SEARCH_CATEGORY_LABEL
|
||||
]
|
||||
else:
|
||||
labels = []
|
||||
return labels
|
||||
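# Illustrative only: with the properties above imported into PhotoInfo,
# Photos 5 labels can be queried directly. photosdb and the label value
# are hypothetical.
def _example_photos_with_label(photosdb, label="Dog"):
    """ Return photos whose machine-learning labels include the given label """
    return [photo for photo in photosdb.photos() if label in photo.labels]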
1101
osxphotos/photoinfo/photoinfo.py
Normal file
File diff suppressed because it is too large
66
osxphotos/photoinfo/template.py
Normal file
@@ -0,0 +1,66 @@
|
||||
""" Custom template system for osxphotos (implemented in PhotoInfo.render_template) """
|
||||
|
||||
# Rolled my own template system because:
|
||||
# 1. Needed to handle multiple values (e.g. album, keyword)
|
||||
# 2. Needed to handle default values if template not found
|
||||
# 3. Didn't want user to need to know python (e.g. by using Mako which is
|
||||
# already used elsewhere in this project)
|
||||
# 4. Couldn't figure out how to do #1 and #2 with str.format()
|
||||
#
|
||||
# This code isn't elegant but it seems to work well. PRs gladly accepted.
|
||||
|
||||
import locale
|
||||
|
||||
# ensure locale set to user's locale
|
||||
locale.setlocale(locale.LC_ALL, "")
|
||||
|
||||
# Permitted substitutions (each of these returns a single value or None)
|
||||
TEMPLATE_SUBSTITUTIONS = {
|
||||
"{name}": "Current filename of the photo",
|
||||
"{original_name}": "Photo's original filename when imported to Photos",
|
||||
"{title}": "Title of the photo",
|
||||
"{descr}": "Description of the photo",
|
||||
"{created.date}": "Photo's creation date in ISO format, e.g. '2020-03-22'",
|
||||
"{created.year}": "4-digit year of file creation time",
|
||||
"{created.yy}": "2-digit year of file creation time",
|
||||
"{created.mm}": "2-digit month of the file creation time (zero padded)",
|
||||
"{created.month}": "Month name in user's locale of the file creation time",
|
||||
"{created.mon}": "Month abbreviation in the user's locale of the file creation time",
|
||||
"{created.doy}": "3-digit day of year (e.g Julian day) of file creation time, starting from 1 (zero padded)",
|
||||
"{modified.date}": "Photo's modification date in ISO format, e.g. '2020-03-22'",
|
||||
"{modified.year}": "4-digit year of file modification time",
|
||||
"{modified.yy}": "2-digit year of file modification time",
|
||||
"{modified.mm}": "2-digit month of the file modification time (zero padded)",
|
||||
"{modified.month}": "Month name in user's locale of the file modification time",
|
||||
"{modified.mon}": "Month abbreviation in the user's locale of the file modification time",
|
||||
"{modified.doy}": "3-digit day of year (e.g Julian day) of file modification time, starting from 1 (zero padded)",
|
||||
"{place.name}": "Place name from the photo's reverse geolocation data, as displayed in Photos",
|
||||
"{place.country_code}": "The ISO country code from the photo's reverse geolocation data",
|
||||
"{place.name.country}": "Country name from the photo's reverse geolocation data",
|
||||
"{place.name.state_province}": "State or province name from the photo's reverse geolocation data",
|
||||
"{place.name.city}": "City or locality name from the photo's reverse geolocation data",
|
||||
"{place.name.area_of_interest}": "Area of interest name (e.g. landmark or public place) from the photo's reverse geolocation data",
|
||||
"{place.address}": "Postal address from the photo's reverse geolocation data, e.g. '2007 18th St NW, Washington, DC 20009, United States'",
|
||||
"{place.address.street}": "Street part of the postal address, e.g. '2007 18th St NW'",
|
||||
"{place.address.city}": "City part of the postal address, e.g. 'Washington'",
|
||||
"{place.address.state_province}": "State/province part of the postal address, e.g. 'DC'",
|
||||
"{place.address.postal_code}": "Postal code part of the postal address, e.g. '20009'",
|
||||
"{place.address.country}": "Country name of the postal address, e.g. 'United States'",
|
||||
"{place.address.country_code}": "ISO country code of the postal address, e.g. 'US'",
|
||||
}
|
||||
|
||||
# Permitted multi-value substitutions (each of these returns None or 1 or more values)
|
||||
TEMPLATE_SUBSTITUTIONS_MULTI_VALUED = {
|
||||
"{album}": "Album(s) photo is contained in",
|
||||
"{folder_album}": "Folder path + album photo is contained in. e.g. 'Folder/Subfolder/Album' or just 'Album' if no enclosing folder",
|
||||
"{keyword}": "Keyword(s) assigned to photo",
|
||||
"{person}": "Person(s) / face(s) in a photo",
|
||||
"{label}": "Image categorization label associated with a photo (Photos 5 only)",
|
||||
"{label_normalized}": "All lower case version of 'label' (Photos 5 only)",
|
||||
}
|
||||
|
||||
# Just the multi-valued substitution names without the braces
|
||||
MULTI_VALUE_SUBSTITUTIONS = [
|
||||
field.replace("{", "").replace("}", "")
|
||||
for field in TEMPLATE_SUBSTITUTIONS_MULTI_VALUED.keys()
|
||||
]
|
||||
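# Illustrative only: the two dicts above double as user documentation; a small
# hypothetical helper prints every supported template field with its description.
def _example_print_template_help():
    """ Print all single- and multi-valued template substitutions """
    for field, descr in {**TEMPLATE_SUBSTITUTIONS, **TEMPLATE_SUBSTITUTIONS_MULTI_VALUED}.items():
        print(f"{field:32} {descr}")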
6
osxphotos/photosdb/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""
|
||||
PhotosDB class
|
||||
Processes a Photos.app library database to extract information about photos
|
||||
"""
|
||||
|
||||
from .photosdb import PhotosDB
|
||||
56
osxphotos/photosdb/_photosdb_process_exif.py
Normal file
@@ -0,0 +1,56 @@
|
||||
""" PhotosDB method for processing exif info
|
||||
Do not import this module directly """
|
||||
|
||||
import logging
|
||||
|
||||
from .._constants import _PHOTOS_4_VERSION
|
||||
from ..utils import _db_is_locked, _debug, _open_sql_file
|
||||
|
||||
|
||||
def _process_exifinfo(self):
|
||||
""" load the exif data from the database
|
||||
this is a PhotosDB method that should be imported in
|
||||
the PhotosDB class definition in photosdb.py
|
||||
"""
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
_process_exifinfo_4(self)
|
||||
else:
|
||||
_process_exifinfo_5(self)
|
||||
|
||||
|
||||
# The following methods do not get imported into PhotosDB
|
||||
# but will get called by _process_exifinfo
|
||||
|
||||
|
||||
def _process_exifinfo_4(photosdb):
|
||||
""" process exif info for Photos <= 4
|
||||
photosdb: PhotosDB instance """
|
||||
photosdb._db_exifinfo_uuid = {}
|
||||
raise NotImplementedError(f"search info not implemented for this database version")
|
||||
|
||||
|
||||
def _process_exifinfo_5(photosdb):
|
||||
""" process exif info for Photos >= 5
|
||||
photosdb: PhotosDB instance """
|
||||
|
||||
db = photosdb._tmp_db
|
||||
|
||||
(conn, cursor) = _open_sql_file(db)
|
||||
|
||||
result = conn.execute(
|
||||
"""
|
||||
SELECT ZGENERICASSET.ZUUID, ZEXTENDEDATTRIBUTES.*
|
||||
FROM ZGENERICASSET
|
||||
JOIN ZEXTENDEDATTRIBUTES
|
||||
ON ZEXTENDEDATTRIBUTES.ZASSET = ZGENERICASSET.Z_PK
|
||||
"""
|
||||
)
|
||||
|
||||
photosdb._db_exifinfo_uuid = {}
|
||||
cols = [c[0] for c in result.description]
|
||||
for row in result.fetchall():
|
||||
record = dict(zip(cols, row))
|
||||
uuid = record["ZUUID"]
|
||||
if uuid in photosdb._db_exifinfo_uuid:
|
||||
logging.warning(f"duplicate exifinfo record found for uuid {uuid}")
|
||||
photosdb._db_exifinfo_uuid[uuid] = record
|
||||
197
osxphotos/photosdb/_photosdb_process_searchinfo.py
Normal file
@@ -0,0 +1,197 @@
|
||||
""" Methods for PhotosDB to add Photos 5 search info such as machine learning labels
|
||||
Kudos to Simon Willison who figured out how to extract this data from psi.sql
|
||||
ref: https://github.com/dogsheep/photos-to-sqlite/issues/16
|
||||
"""
|
||||
|
||||
import logging
|
||||
import pathlib
|
||||
import uuid as uuidlib
|
||||
from pprint import pformat
|
||||
|
||||
from .._constants import _PHOTOS_4_VERSION, SEARCH_CATEGORY_LABEL
|
||||
from ..utils import _db_is_locked, _debug, _open_sql_file
|
||||
|
||||
"""
|
||||
This module should be imported in the class definition of PhotosDB in photosdb.py
|
||||
Do not import this module directly
|
||||
This module adds the following method to PhotosDB:
|
||||
_process_searchinfo: process search terms from psi.sqlite
|
||||
|
||||
The following properties are added to PhotosDB
|
||||
labels: list of all labels in the library
|
||||
labels_normalized: list of all labels normalized in the library
|
||||
labels_as_dict: dict of {label: count of photos} in reverse sorted order (most photos first)
|
||||
labels_normalized_as_dict: dict of {normalized label: count of photos} in reverse sorted order (most photos first)
|
||||
|
||||
The following data structures are added to PhotosDB
|
||||
self._db_searchinfo_categories
|
||||
self._db_searchinfo_uuid
|
||||
self._db_searchinfo_labels
|
||||
self._db_searchinfo_labels_normalized
|
||||
|
||||
These methods only work on Photos 5 databases. Will print warning on earlier library versions.
|
||||
"""
|
||||
|
||||
|
||||
def _process_searchinfo(self):
|
||||
""" load machine learning/search term label info from a Photos library
|
||||
search terms are read from the library's search database file;
in Photos 5, this is called psi.sqlite
|
||||
Note: Only works on Photos version == 5.0 """
|
||||
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
raise NotImplementedError(
|
||||
f"search info not implemented for this database version"
|
||||
)
|
||||
|
||||
search_db_path = pathlib.Path(self._dbfile).parent / "search" / "psi.sqlite"
|
||||
if not search_db_path.exists():
|
||||
raise FileNotFoundError(f"could not find search db: {search_db_path}")
|
||||
|
||||
if _db_is_locked(search_db_path):
|
||||
search_db = self._copy_db_file(search_db_path)
|
||||
else:
|
||||
search_db = search_db_path
|
||||
|
||||
(conn, c) = _open_sql_file(search_db)
|
||||
|
||||
result = conn.execute(
|
||||
"""
|
||||
select
|
||||
ga.rowid,
|
||||
assets.uuid_0,
|
||||
assets.uuid_1,
|
||||
groups.rowid as groupid,
|
||||
groups.category,
|
||||
groups.owning_groupid,
|
||||
groups.content_string,
|
||||
groups.normalized_string,
|
||||
groups.lookup_identifier
|
||||
from
|
||||
ga
|
||||
join groups on groups.rowid = ga.groupid
|
||||
join assets on ga.assetid = assets.rowid
|
||||
order by
|
||||
ga.rowid
|
||||
"""
|
||||
)
|
||||
|
||||
# _db_searchinfo_uuid is dict in form {uuid : [list of associated search info records]}
|
||||
_db_searchinfo_uuid = {}
|
||||
|
||||
# _db_searchinfo_categories is dict in form {search info category id: list of normalized strings for the category}
|
||||
# right now, this is mostly for debugging to easily see which search terms are in the library
|
||||
_db_searchinfo_categories = {}
|
||||
|
||||
# _db_searchinfo_labels is dict in form {normalized label: [list of photo uuids]}
|
||||
# this serves as a reverse index from label to photos containing the label
|
||||
# _db_searchinfo_labels_normalized is the same but with normalized (lower case) version of the label
|
||||
_db_searchinfo_labels = {}
|
||||
_db_searchinfo_labels_normalized = {}
|
||||
|
||||
cols = [c[0] for c in result.description]
|
||||
for row in result.fetchall():
|
||||
record = dict(zip(cols, row))
|
||||
uuid = ints_to_uuid(record["uuid_0"], record["uuid_1"])
|
||||
# strings have null character appended, so strip it
|
||||
for key in record:
|
||||
if isinstance(record[key], str):
|
||||
record[key] = record[key].replace("\x00", "")
|
||||
try:
|
||||
_db_searchinfo_uuid[uuid].append(record)
|
||||
except KeyError:
|
||||
_db_searchinfo_uuid[uuid] = [record]
|
||||
|
||||
category = record["category"]
|
||||
try:
|
||||
_db_searchinfo_categories[record["category"]].append(
|
||||
record["normalized_string"]
|
||||
)
|
||||
except KeyError:
|
||||
_db_searchinfo_categories[record["category"]] = [
|
||||
record["normalized_string"]
|
||||
]
|
||||
|
||||
if record["category"] == SEARCH_CATEGORY_LABEL:
|
||||
label = record["content_string"]
|
||||
label_norm = record["normalized_string"]
|
||||
try:
|
||||
_db_searchinfo_labels[label].append(uuid)
|
||||
_db_searchinfo_labels_normalized[label_norm].append(uuid)
|
||||
except KeyError:
|
||||
_db_searchinfo_labels[label] = [uuid]
|
||||
_db_searchinfo_labels_normalized[label_norm] = [uuid]
|
||||
|
||||
self._db_searchinfo_categories = _db_searchinfo_categories
|
||||
self._db_searchinfo_uuid = _db_searchinfo_uuid
|
||||
self._db_searchinfo_labels = _db_searchinfo_labels
|
||||
self._db_searchinfo_labels_normalized = _db_searchinfo_labels_normalized
|
||||
|
||||
if _debug():
|
||||
logging.debug(
|
||||
"_db_searchinfo_categories: \n" + pformat(self._db_searchinfo_categories)
|
||||
)
|
||||
logging.debug("_db_searchinfo_uuid: \n" + pformat(self._db_searchinfo_uuid))
|
||||
logging.debug("_db_searchinfo_labels: \n" + pformat(self._db_searchinfo_labels))
|
||||
logging.debug(
|
||||
"_db_searchinfo_labels_normalized: \n"
|
||||
+ pformat(self._db_searchinfo_labels_normalized)
|
||||
)
|
||||
|
||||
|
||||
@property
|
||||
def labels(self):
|
||||
""" return list of all search info labels found in the library """
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
logging.warning(f"SearchInfo not implemented for this library version")
|
||||
return []
|
||||
|
||||
return list(self._db_searchinfo_labels.keys())
|
||||
|
||||
|
||||
@property
|
||||
def labels_normalized(self):
|
||||
""" return list of all normalized search info labels found in the library """
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
logging.warning(f"SearchInfo not implemented for this library version")
|
||||
return []
|
||||
|
||||
return list(self._db_searchinfo_labels_normalized.keys())
|
||||
|
||||
|
||||
@property
|
||||
def labels_as_dict(self):
|
||||
""" return labels as dict of label: count in reverse sorted order (descending) """
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
logging.warning(f"SearchInfo not implemented for this library version")
|
||||
return dict()
|
||||
|
||||
labels = {k: len(v) for k, v in self._db_searchinfo_labels.items()}
|
||||
labels = dict(sorted(labels.items(), key=lambda kv: kv[1], reverse=True))
|
||||
return labels
|
||||
|
||||
|
||||
@property
|
||||
def labels_normalized_as_dict(self):
|
||||
""" return normalized labels as dict of label: count in reverse sorted order (descending) """
|
||||
if self._db_version <= _PHOTOS_4_VERSION:
|
||||
logging.warning(f"SearchInfo not implemented for this library version")
|
||||
return dict()
|
||||
labels = {k: len(v) for k, v in self._db_searchinfo_labels_normalized.items()}
|
||||
labels = dict(sorted(labels.items(), key=lambda kv: kv[1], reverse=True))
|
||||
return labels
|
||||
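# Illustrative only: labels_as_dict above is already sorted by photo count in
# descending order, so the most common labels can be sliced directly.
# photosdb is a hypothetical PhotosDB instance.
def _example_top_labels(photosdb, n=10):
    """ Return the n most common (label, count) pairs in the library """
    return list(photosdb.labels_as_dict.items())[:n]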
|
||||
|
||||
# The following method is not imported into PhotosDB
|
||||
|
||||
|
||||
def ints_to_uuid(uuid_0, uuid_1):
|
||||
""" convert two signed ints into a UUID strings
|
||||
uuid_0, uuid_1: the two int components of an RFC 4122 UUID """
|
||||
|
||||
# assumes uuid imported as uuidlib (to avoid namespace conflict with other uses of uuid)
|
||||
|
||||
bytes_ = uuid_0.to_bytes(8, "little", signed=True) + uuid_1.to_bytes(
|
||||
8, "little", signed=True
|
||||
)
|
||||
return str(uuidlib.UUID(bytes=bytes_)).upper()
|
||||
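# Illustrative only: a round-trip check of ints_to_uuid() with a random UUID,
# splitting it into the same two signed little-endian 64-bit ints stored in
# psi.sqlite's assets table.
def _example_ints_to_uuid_roundtrip():
    """ Split a UUID into two signed 64-bit ints and reassemble it """
    u = uuidlib.uuid4()
    uuid_0 = int.from_bytes(u.bytes[:8], "little", signed=True)
    uuid_1 = int.from_bytes(u.bytes[8:], "little", signed=True)
    assert ints_to_uuid(uuid_0, uuid_1) == str(u).upper()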
File diff suppressed because it is too large
645
osxphotos/placeinfo.py
Normal file
@@ -0,0 +1,645 @@
|
||||
"""
|
||||
PlaceInfo class
|
||||
Provides reverse geolocation info for photos
|
||||
|
||||
See https://developer.apple.com/documentation/corelocation/clplacemark
|
||||
for additional documentation on reverse geolocation data
|
||||
"""
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import namedtuple # pylint: disable=syntax-error
|
||||
|
||||
import yaml
|
||||
from bpylist import archiver
|
||||
|
||||
# postal address information, returned by PlaceInfo.address
|
||||
PostalAddress = namedtuple(
|
||||
"PostalAddress",
|
||||
[
|
||||
"street",
|
||||
"sub_locality",
|
||||
"city",
|
||||
"sub_administrative_area",
|
||||
"state_province",
|
||||
"postal_code",
|
||||
"country",
|
||||
"iso_country_code",
|
||||
],
|
||||
)
|
||||
|
||||
# PlaceNames tuple returned by PlaceInfo.names
|
||||
# order of fields 0 - 17 is mapped to placeType value in
|
||||
# PLRevGeoLocationInfo.mapInfo.sortedPlaceInfos
|
||||
# field 18 is combined bodies of water (ocean + inland_water)
|
||||
# and maps to Photos <= 4, RKPlace.type == 44
|
||||
# (Photos <= 4 doesn't have ocean or inland_water types)
|
||||
# The fields named "field0", etc. appear to be unused
|
||||
PlaceNames = namedtuple(
|
||||
"PlaceNames",
|
||||
[
|
||||
"field0",
|
||||
"country", # The name of the country associated with the placemark.
|
||||
"state_province", # administrativeArea, The state or province associated with the placemark.
|
||||
"sub_administrative_area", # Additional administrative area information for the placemark.
|
||||
"city", # locality, The city associated with the placemark.
|
||||
"field5",
|
||||
"additional_city_info", # subLocality, Additional city-level information for the placemark.
|
||||
"ocean", # The name of the ocean associated with the placemark.
|
||||
"area_of_interest", # areasOfInterest, The relevant areas of interest associated with the placemark.
|
||||
"inland_water", # The name of the inland water body associated with the placemark.
|
||||
"field10",
|
||||
"region", # The geographic region associated with the placemark.
|
||||
"sub_throughfare", # Additional street-level information for the placemark.
|
||||
"field13",
|
||||
"postal_code", # The postal code associated with the placemark.
|
||||
"field15",
|
||||
"field16",
|
||||
"street_address", # throughfare, The street address associated with the placemark.
|
||||
"body_of_water", # RKPlace.type == 44, appears to be any body of water (ocean or inland)
|
||||
],
|
||||
)
|
||||
|
||||
# The following classes represent Photo Library Reverse Geolocation Info as stored
|
||||
# in ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATA
|
||||
# These classes are used by bpylist.archiver to unarchive the serialized objects
|
||||
class PLRevGeoLocationInfo:
|
||||
""" The top level reverse geolocation object """
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
addressString,
|
||||
countryCode,
|
||||
mapItem,
|
||||
isHome,
|
||||
compoundNames,
|
||||
compoundSecondaryNames,
|
||||
version,
|
||||
geoServiceProvider,
|
||||
postalAddress,
|
||||
):
|
||||
self.addressString = addressString
|
||||
self.countryCode = countryCode
|
||||
self.mapItem = mapItem
|
||||
self.isHome = isHome
|
||||
self.compoundNames = compoundNames
|
||||
self.compoundSecondaryNames = compoundSecondaryNames
|
||||
self.version = version
|
||||
self.geoServiceProvider = geoServiceProvider
|
||||
self.postalAddress = postalAddress
|
||||
|
||||
def __eq__(self, other):
|
||||
for field in [
|
||||
"addressString",
|
||||
"countryCode",
|
||||
"isHome",
|
||||
"compoundNames",
|
||||
"compoundSecondaryNames",
|
||||
"version",
|
||||
"geoServiceProvider",
|
||||
"postalAddress",
|
||||
]:
|
||||
if getattr(self, field) != getattr(other, field):
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
return f"addressString: {self.addressString}, countryCode: {self.countryCode}, isHome: {self.isHome}, mapItem: {self.mapItem}, postalAddress: {self.postalAddress}"
|
||||
|
||||
@staticmethod
|
||||
def encode_archive(obj, archive):
|
||||
archive.encode("addressString", obj.addressString)
|
||||
archive.encode("countryCode", obj.countryCode)
|
||||
archive.encode("mapItem", obj.mapItem)
|
||||
archive.encode("isHome", obj.isHome)
|
||||
archive.encode("compoundNames", obj.compoundNames)
|
||||
archive.encode("compoundSecondaryNames", obj.compoundSecondaryNames)
|
||||
archive.encode("version", obj.version)
|
||||
archive.encode("geoServiceProvider", obj.geoServiceProvider)
|
||||
archive.encode("postalAddress", obj.postalAddress)
|
||||
|
||||
@staticmethod
|
||||
def decode_archive(archive):
|
||||
addressString = archive.decode("addressString")
|
||||
countryCode = archive.decode("countryCode")
|
||||
mapItem = archive.decode("mapItem")
|
||||
isHome = archive.decode("isHome")
|
||||
compoundNames = archive.decode("compoundNames")
|
||||
compoundSecondaryNames = archive.decode("compoundSecondaryNames")
|
||||
version = archive.decode("version")
|
||||
geoServiceProvider = archive.decode("geoServiceProvider")
|
||||
postalAddress = archive.decode("postalAddress")
|
||||
return PLRevGeoLocationInfo(
|
||||
addressString,
|
||||
countryCode,
|
||||
mapItem,
|
||||
isHome,
|
||||
compoundNames,
|
||||
compoundSecondaryNames,
|
||||
version,
|
||||
geoServiceProvider,
|
||||
postalAddress,
|
||||
)
|
||||
|
||||
|
||||
class PLRevGeoMapItem:
|
||||
""" Stores the list of place names, organized by area """
|
||||
|
||||
def __init__(self, sortedPlaceInfos, finalPlaceInfos):
|
||||
self.sortedPlaceInfos = sortedPlaceInfos
|
||||
self.finalPlaceInfos = finalPlaceInfos
|
||||
|
||||
def __eq__(self, other):
|
||||
for field in ["sortedPlaceInfos", "finalPlaceInfos"]:
|
||||
if getattr(self, field) != getattr(other, field):
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
sortedPlaceInfos = []
|
||||
finalPlaceInfos = []
|
||||
for place in self.sortedPlaceInfos:
|
||||
sortedPlaceInfos.append(str(place))
|
||||
for place in self.finalPlaceInfos:
|
||||
finalPlaceInfos.append(str(place))
|
||||
return (
|
||||
f"finalPlaceInfos: {finalPlaceInfos}, sortedPlaceInfos: {sortedPlaceInfos}"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def encode_archive(obj, archive):
|
||||
archive.encode("sortedPlaceInfos", obj.sortedPlaceInfos)
|
||||
archive.encode("finalPlaceInfos", obj.finalPlaceInfos)
|
||||
|
||||
@staticmethod
|
||||
def decode_archive(archive):
|
||||
sortedPlaceInfos = archive.decode("sortedPlaceInfos")
|
||||
finalPlaceInfos = archive.decode("finalPlaceInfos")
|
||||
return PLRevGeoMapItem(sortedPlaceInfos, finalPlaceInfos)
|
||||
|
||||
|
||||
class PLRevGeoMapItemAdditionalPlaceInfo:
|
||||
""" Additional info about individual places """
|
||||
|
||||
def __init__(self, area, name, placeType, dominantOrderType):
|
||||
self.area = area
|
||||
self.name = name
|
||||
self.placeType = placeType
|
||||
self.dominantOrderType = dominantOrderType
|
||||
|
||||
def __eq__(self, other):
|
||||
for field in ["area", "name", "placeType", "dominantOrderType"]:
|
||||
if getattr(self, field) != getattr(other, field):
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
return f"area: {self.area}, name: {self.name}, placeType: {self.placeType}"
|
||||
|
||||
@staticmethod
|
||||
def encode_archive(obj, archive):
|
||||
archive.encode("area", obj.area)
|
||||
archive.encode("name", obj.name)
|
||||
archive.encode("placeType", obj.placeType)
|
||||
archive.encode("dominantOrderType", obj.dominantOrderType)
|
||||
|
||||
@staticmethod
|
||||
def decode_archive(archive):
|
||||
area = archive.decode("area")
|
||||
name = archive.decode("name")
|
||||
placeType = archive.decode("placeType")
|
||||
dominantOrderType = archive.decode("dominantOrderType")
|
||||
return PLRevGeoMapItemAdditionalPlaceInfo(
|
||||
area, name, placeType, dominantOrderType
|
||||
)
|
||||
|
||||
|
||||
class CNPostalAddress:
|
||||
""" postal address for the reverse geolocation info """
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
_ISOCountryCode,
|
||||
_city,
|
||||
_country,
|
||||
_postalCode,
|
||||
_state,
|
||||
_street,
|
||||
_subAdministrativeArea,
|
||||
_subLocality,
|
||||
):
|
||||
self._ISOCountryCode = _ISOCountryCode
|
||||
self._city = _city
|
||||
self._country = _country
|
||||
self._postalCode = _postalCode
|
||||
self._state = _state
|
||||
self._street = _street
|
||||
self._subAdministrativeArea = _subAdministrativeArea
|
||||
self._subLocality = _subLocality
|
||||
|
||||
def __eq__(self, other):
|
||||
for field in [
|
||||
"_ISOCountryCode",
|
||||
"_city",
|
||||
"_country",
|
||||
"_postalCode",
|
||||
"_state",
|
||||
"_street",
|
||||
"_subAdministrativeArea",
|
||||
"_subLocality",
|
||||
]:
|
||||
if getattr(self, field) != getattr(other, field):
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
return ", ".join(
|
||||
map(
|
||||
str,
|
||||
[
|
||||
self._street,
|
||||
self._city,
|
||||
self._subLocality,
|
||||
self._subAdministrativeArea,
|
||||
self._state,
|
||||
self._postalCode,
|
||||
self._country,
|
||||
self._ISOCountryCode,
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def encode_archive(obj, archive):
|
||||
archive.encode("_ISOCountryCode", obj._ISOCountryCode)
|
||||
archive.encode("_country", obj._country)
|
||||
archive.encode("_city", obj._city)
|
||||
archive.encode("_postalCode", obj._postalCode)
|
||||
archive.encode("_state", obj._state)
|
||||
archive.encode("_street", obj._street)
|
||||
archive.encode("_subAdministrativeArea", obj._subAdministrativeArea)
|
||||
archive.encode("_subLocality", obj._subLocality)
|
||||
|
||||
@staticmethod
|
||||
def decode_archive(archive):
|
||||
_ISOCountryCode = archive.decode("_ISOCountryCode")
|
||||
_country = archive.decode("_country")
|
||||
_city = archive.decode("_city")
|
||||
_postalCode = archive.decode("_postalCode")
|
||||
_state = archive.decode("_state")
|
||||
_street = archive.decode("_street")
|
||||
_subAdministrativeArea = archive.decode("_subAdministrativeArea")
|
||||
_subLocality = archive.decode("_subLocality")
|
||||
|
||||
return CNPostalAddress(
|
||||
_ISOCountryCode,
|
||||
_city,
|
||||
_country,
|
||||
_postalCode,
|
||||
_state,
|
||||
_street,
|
||||
_subAdministrativeArea,
|
||||
_subLocality,
|
||||
)
|
||||
|
||||
|
||||
# register the classes with bpylist.archiver
|
||||
archiver.update_class_map({"CNPostalAddress": CNPostalAddress})
|
||||
archiver.update_class_map(
|
||||
{"PLRevGeoMapItemAdditionalPlaceInfo": PLRevGeoMapItemAdditionalPlaceInfo}
|
||||
)
|
||||
archiver.update_class_map({"PLRevGeoMapItem": PLRevGeoMapItem})
|
||||
archiver.update_class_map({"PLRevGeoLocationInfo": PLRevGeoLocationInfo})
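# Illustrative sketch (assumption, not part of the original source): with the classes
# above registered, bpylist's archiver can round-trip the serialized reverse-geolocation
# blob Photos stores with an asset. The variable name `revgeoloc_blob` is hypothetical;
# PlaceInfo5 below performs exactly this unarchive step.
#
#   revgeo = archiver.unarchive(revgeoloc_blob)      # -> PLRevGeoLocationInfo instance
#   revgeo.countryCode, revgeo.mapItem.sortedPlaceInfos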
|
||||
|
||||
|
||||
class PlaceInfo(ABC):
|
||||
@property
|
||||
@abstractmethod
|
||||
def address_str(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def country_code(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def ishome(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def name(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def names(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def address(self):
|
||||
pass
|
||||
|
||||
|
||||
class PlaceInfo4(PlaceInfo):
|
||||
""" Reverse geolocation place info for a photo (Photos <= 4) """
|
||||
|
||||
def __init__(self, place_names, country_code):
|
||||
""" place_names: list of place name tuples in ascending order by area
|
||||
tuple fields are: modelID, place name, place type, area, e.g.
|
||||
[(5, "St James's Park", 45, 0),
|
||||
(4, 'Westminster', 16, 22097376),
|
||||
(3, 'London', 4, 1596146816),
|
||||
(2, 'England', 2, 180406091776),
|
||||
(1, 'United Kingdom', 1, 414681432064)]
|
||||
country_code: two letter country code for the country
|
||||
"""
|
||||
self._place_names = place_names
|
||||
self._country_code = country_code
|
||||
self._process_place_info()
|
||||
|
||||
@property
|
||||
def address_str(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def country_code(self):
|
||||
return self._country_code
|
||||
|
||||
@property
|
||||
def ishome(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def names(self):
|
||||
return self._names
|
||||
|
||||
@property
|
||||
def address(self):
|
||||
return PostalAddress(None, None, None, None, None, None, None, None)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, type(self)):
|
||||
return False
|
||||
else:
|
||||
return (
|
||||
self._place_names == other._place_names
|
||||
and self._country_code == other._country_code
|
||||
)
|
||||
|
||||
def _process_place_info(self):
|
||||
""" Process place_names to set self._name and self._names """
|
||||
places = self._place_names
|
||||
|
||||
# build a dictionary where key is placetype
|
||||
places_dict = {}
|
||||
for p in places:
|
||||
# places in format:
|
||||
# [(5, "St James's Park", 45, 0), ]
|
||||
# 0: modelID
|
||||
# 1: name
|
||||
# 2: type
|
||||
# 3: area
|
||||
try:
|
||||
places_dict[p[2]].append((p[1], p[3]))
|
||||
except KeyError:
|
||||
places_dict[p[2]] = [(p[1], p[3])]
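# Illustrative example (assumption, not part of the original source): for the sample
# place_names shown in the __init__ docstring above, the loop produces a dict keyed
# by place type:
#   places_dict = {45: [("St James's Park", 0)], 16: [("Westminster", 22097376)],
#                  4: [("London", 1596146816)], 2: [("England", 180406091776)],
#                  1: [("United Kingdom", 414681432064)]}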
|
||||
|
||||
# build list to populate PlaceNames tuple
|
||||
# initialize with empty lists for each field in PlaceNames
|
||||
place_info = [[] for _ in range(19)]
|
||||
|
||||
# add the place names sorted by area (ascending)
|
||||
# in Photos <=4, possible place type values are:
|
||||
# 45: areasOfInterest (The relevant areas of interest associated with the placemark.)
|
||||
# 44: body of water (includes both inlandWater and ocean)
|
||||
# 43: subLocality (Additional city-level information for the placemark.)
|
||||
# 16: locality (The city associated with the placemark.)
|
||||
# 4: subAdministrativeArea (Additional administrative area information for the placemark.)
|
||||
# 2: administrativeArea (The state or province associated with the placemark.)
|
||||
# 1: country
|
||||
# mapping = mapping from PlaceNames to field in places_dict
|
||||
# PlaceNames fields map to the placeType value in Photos5 (0..17)
|
||||
# but place type in Photos <=4 has different values
|
||||
# hence (3, 4) means PlaceNames[3] = places_dict[4] (sub_administrative_area)
|
||||
mapping = [(1, 1), (2, 2), (3, 4), (4, 16), (18, 44), (8, 45)]
|
||||
for field5, field4 in mapping:
|
||||
try:
|
||||
place_info[field5] = [
|
||||
p[0]
|
||||
for p in sorted(places_dict[field4], key=lambda place: place[1])
|
||||
]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
place_names = PlaceNames(*place_info)
|
||||
self._names = place_names
|
||||
|
||||
# build the name as it appears in Photos
|
||||
# the length of the name is at most 3 fields and appears to be based on available
|
||||
# reverse geolocation data in the following order (left to right, joined by ',')
|
||||
# always has country if available then either area of interest and city OR
|
||||
# city and state
|
||||
# e.g. 4, 2, 1 OR 8, 4, 1
|
||||
# 8 (45): area_of_interest
|
||||
# 4 (16): locality / city
|
||||
# 2 (2): administrative area (state/province)
|
||||
# 1 (1): country
|
||||
name_list = []
if place_names[8]:
    name_list.append(place_names[8][0])
    if place_names[4]:
        name_list.append(place_names[4][0])
elif place_names[4]:
    name_list.append(place_names[4][0])
    if place_names[2]:
        name_list.append(place_names[2][0])
elif place_names[2]:
    name_list.append(place_names[2][0])

# add country
if place_names[1]:
    name_list.append(place_names[1][0])

name = ", ".join(name_list)
self._name = name if name != "" else None
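# Worked example (not part of the original source): with the sample data from the
# __init__ docstring, place_names[8] == ["St James's Park"], place_names[4] ==
# ["Westminster"], and place_names[1] == ["United Kingdom"], so the logic above yields
#   self._name == "St James's Park, Westminster, United Kingdom"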
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
info = {
|
||||
"name": self.name,
|
||||
"names": self.names,
|
||||
"country_code": self.country_code,
|
||||
}
|
||||
strval = "PlaceInfo(" + ", ".join([f"{k}='{v}'" for k, v in info.items()]) + ")"
|
||||
return strval
|
||||
|
||||
def as_dict(self):
|
||||
info = {
|
||||
"name": self.name,
|
||||
"names": self.names._asdict(),
|
||||
"country_code": self.country_code,
|
||||
}
|
||||
return info
|
||||
|
||||
|
||||
class PlaceInfo5(PlaceInfo):
|
||||
""" Reverse geolocation place info for a photo (Photos >= 5) """
|
||||
|
||||
def __init__(self, revgeoloc_bplist):
|
||||
""" revgeoloc_bplist: a binary plist blob containing
|
||||
a serialized PLRevGeoLocationInfo object """
|
||||
self._bplist = revgeoloc_bplist
|
||||
# todo: check for None?
|
||||
self._plrevgeoloc = archiver.unarchive(revgeoloc_bplist)
|
||||
self._process_place_info()
|
||||
|
||||
@property
|
||||
def address_str(self):
|
||||
""" returns the postal address as a string """
|
||||
return self._plrevgeoloc.addressString
|
||||
|
||||
@property
|
||||
def country_code(self):
|
||||
""" returns the country code """
|
||||
return self._plrevgeoloc.countryCode
|
||||
|
||||
@property
|
||||
def ishome(self):
|
||||
""" returns True if place is user's home address """
|
||||
return self._plrevgeoloc.isHome
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
""" returns local place name """
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def names(self):
|
||||
""" returns PlaceNames tuple with detailed reverse geolocation place names """
|
||||
return self._names
|
||||
|
||||
@property
|
||||
def address(self):
|
||||
addr = self._plrevgeoloc.postalAddress
|
||||
address = PostalAddress(
|
||||
street=addr._street,
|
||||
sub_locality=addr._subLocality,
|
||||
city=addr._city,
|
||||
sub_administrative_area=addr._subAdministrativeArea,
|
||||
state_province=addr._state,
|
||||
postal_code=addr._postalCode,
|
||||
country=addr._country,
|
||||
iso_country_code=addr._ISOCountryCode,
|
||||
)
|
||||
return address
|
||||
|
||||
def _process_place_info(self):
|
||||
""" Process sortedPlaceInfos to set self._name and self._names """
|
||||
places = self._plrevgeoloc.mapItem.sortedPlaceInfos
|
||||
|
||||
# build a dictionary where key is placetype
|
||||
places_dict = {}
|
||||
for p in places:
|
||||
try:
|
||||
places_dict[p.placeType].append((p.name, p.area))
|
||||
except KeyError:
|
||||
places_dict[p.placeType] = [(p.name, p.area)]
|
||||
|
||||
# build list to populate PlaceNames tuple
|
||||
place_info = []
|
||||
for field in range(18):
|
||||
try:
|
||||
# add the place names sorted by area (ascending)
|
||||
place_info.append(
|
||||
[
|
||||
p[0]
|
||||
for p in sorted(places_dict[field], key=lambda place: place[1])
|
||||
]
|
||||
)
|
||||
except KeyError:
|
||||
place_info.append([])
|
||||
|
||||
# fill in body_of_water for compatibility with Photos <= 4
|
||||
place_info.append(place_info[7] + place_info[9])
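# Illustrative example (assumption, not part of the original source): if place_info[7]
# (ocean) were ["Atlantic Ocean"] and place_info[9] (inland_water) were [], the
# synthesized entry would be ["Atlantic Ocean"], matching the single body-of-water
# place type (44) used by Photos <= 4.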
|
||||
|
||||
place_names = PlaceNames(*place_info)
|
||||
self._names = place_names
|
||||
|
||||
# build the name as it appears in Photos
|
||||
# the length of the name is variable and appears to be based on available
|
||||
# reverse geolocation data in the following order (left to right, joined by ',')
|
||||
# 8: area_of_interest
|
||||
# 11: region (I've only seen this applied to islands)
|
||||
# 4: locality / city
|
||||
# 2: administrative area (state/province)
|
||||
# 1: country
|
||||
# 9: inland_water
|
||||
# 7: ocean
|
||||
name = ", ".join(
|
||||
[
|
||||
p[0]
|
||||
for p in [
|
||||
place_names[8], # area of interest
|
||||
place_names[11], # region (I've only seen this applied to islands)
|
||||
place_names[4], # locality / city
|
||||
place_names[2], # administrative area (state/province)
|
||||
place_names[1], # country
|
||||
place_names[9], # inland_water
|
||||
place_names[7], # ocean
|
||||
]
|
||||
if p and p[0]
|
||||
]
|
||||
)
|
||||
self._name = name if name != "" else None
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, type(self)):
|
||||
return False
|
||||
else:
|
||||
return self._plrevgeoloc == other._plrevgeoloc
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __str__(self):
|
||||
info = {
|
||||
"name": self.name,
|
||||
"names": self.names,
|
||||
"country_code": self.country_code,
|
||||
"ishome": self.ishome,
|
||||
"address_str": self.address_str,
|
||||
"address": str(self.address),
|
||||
}
|
||||
strval = "PlaceInfo(" + ", ".join([f"{k}='{v}'" for k, v in info.items()]) + ")"
|
||||
return strval
|
||||
|
||||
def as_dict(self):
|
||||
info = {
|
||||
"name": self.name,
|
||||
"names": self.names._asdict(),
|
||||
"country_code": self.country_code,
|
||||
"ishome": self.ishome,
|
||||
"address_str": self.address_str,
|
||||
"address": self.address._asdict(),
|
||||
}
|
||||
return info
|
||||
@@ -79,16 +79,16 @@
|
||||
xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/">
|
||||
${dc_description(photo.description)}
|
||||
${dc_title(photo.title)}
|
||||
${dc_subject(photo.keywords + photo.persons)}
|
||||
${dc_subject(subjects)}
|
||||
${dc_datecreated(photo.date)}
|
||||
</rdf:Description>
|
||||
<rdf:Description rdf:about=''
|
||||
xmlns:Iptc4xmpExt='http://iptc.org/std/Iptc4xmpExt/2008-02-29/'>
|
||||
${iptc_personinimage(photo.persons)}
|
||||
${iptc_personinimage(persons)}
|
||||
</rdf:Description>
|
||||
<rdf:Description rdf:about=''
|
||||
xmlns:digiKam='http://www.digikam.org/ns/1.0/'>
|
||||
${dk_tagslist(photo.keywords)}
|
||||
${dk_tagslist(keywords)}
|
||||
</rdf:Description>
|
||||
<rdf:Description rdf:about=''
|
||||
xmlns:xmp='http://ns.adobe.com/xap/1.0/'>
|
||||
|
||||
@@ -1,15 +1,20 @@
|
||||
import fnmatch
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import pathlib
|
||||
import platform
|
||||
import re
|
||||
import sqlite3
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import urllib.parse
|
||||
import pathlib
|
||||
from plistlib import load as plistload
|
||||
|
||||
import CoreFoundation
|
||||
import CoreServices
|
||||
import objc
|
||||
from Foundation import *
|
||||
|
||||
@@ -113,10 +118,34 @@ def _dd_to_dms(dd):
|
||||
return int(deg_), int(min_), sec_
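# Illustrative example (assumption: _dd_to_dms splits decimal degrees into whole
# degrees, whole minutes, and fractional seconds):
#   _dd_to_dms(34.0522)  ->  approximately (34, 3, 7.92)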
|
||||
|
||||
|
||||
def _copy_file(src, dest):
|
||||
def _hardlink_file(src, dest):
|
||||
""" Hardlinks a file from src path to dest path
|
||||
src: source path as string
|
||||
dest: destination path as string
|
||||
Raises exception if linking fails or either path is None """
|
||||
|
||||
if src is None or dest is None:
|
||||
raise ValueError("src and dest must not be None", src, dest)
|
||||
|
||||
if not os.path.isfile(src):
|
||||
raise FileNotFoundError("src file does not appear to exist", src)
|
||||
|
||||
# if error on linking, os.link will raise an OSError
|
||||
try:
|
||||
os.link(src, dest)
|
||||
except Exception as e:
|
||||
logging.critical(f"os.link returned error: {e}")
|
||||
raise e
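# Usage sketch (assumption, not part of the original source); note that os.link
# requires src and dest to be on the same volume/filesystem:
#   _hardlink_file("/Volumes/photos/IMG_0001.jpg", "/Volumes/photos/export/IMG_0001.jpg")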
|
||||
|
||||
|
||||
def _copy_file(src, dest, norsrc=False):
|
||||
""" Copies a file from src path to dest path
|
||||
src: source path as string
|
||||
dest: destination path as string
|
||||
norsrc: (bool) if True, uses --norsrc flag with ditto so it will not copy
|
||||
resource fork or extended attributes. May be useful on volumes that
|
||||
don't work with extended attributes (likely only certain SMB mounts)
|
||||
default is False
|
||||
Uses ditto to perform copy; will silently overwrite dest if it exists
|
||||
Raises exception if copy fails or either path is None """
|
||||
|
||||
@@ -124,19 +153,24 @@ def _copy_file(src, dest):
|
||||
raise ValueError("src and dest must not be None", src, dest)
|
||||
|
||||
if not os.path.isfile(src):
|
||||
raise ValueError("src file does not appear to exist", src)
|
||||
raise FileNotFoundError("src file does not appear to exist", src)
|
||||
|
||||
if norsrc:
|
||||
command = ["/usr/bin/ditto", "--norsrc", src, dest]
|
||||
else:
|
||||
command = ["/usr/bin/ditto", src, dest]
|
||||
|
||||
# if error on copy, subprocess will raise CalledProcessError
|
||||
try:
|
||||
subprocess.run(
|
||||
["/usr/bin/ditto", src, dest], check=True, stderr=subprocess.PIPE
|
||||
)
|
||||
result = subprocess.run(command, check=True, stderr=subprocess.PIPE)
|
||||
except subprocess.CalledProcessError as e:
|
||||
logging.critical(
|
||||
f"ditto returned error: {e.returncode} {e.stderr.decode(sys.getfilesystemencoding()).rstrip()}"
|
||||
)
|
||||
raise e
|
||||
|
||||
return result.returncode
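# Usage sketch (assumption, not part of the original source): copy to a volume that
# rejects extended attributes (e.g. certain SMB mounts) by skipping the resource fork:
#   _copy_file("/path/to/IMG_0001.jpg", "/Volumes/smb_share/IMG_0001.jpg", norsrc=True)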
|
||||
|
||||
|
||||
def dd_to_dms_str(lat, lon):
|
||||
""" convert latitude, longitude in degrees to degrees, minutes, seconds as string """
|
||||
@@ -218,6 +252,8 @@ def get_last_library_path():
|
||||
|
||||
if photosurlref is not None:
|
||||
# use CFURLCreateByResolvingBookmarkData to de-serialize bookmark data into a CFURLRef
|
||||
# pylint: disable=no-member
|
||||
# pylint: disable=undefined-variable
|
||||
photosurl = CoreFoundation.CFURLCreateByResolvingBookmarkData(
|
||||
kCFAllocatorDefault, photosurlref, 0, None, None, None, None
|
||||
)
|
||||
@@ -255,7 +291,7 @@ def list_photo_libraries():
|
||||
# On older MacOS versions, mdfind appears to ignore some libraries
|
||||
# glob to find libraries in ~/Pictures then mdfind to find all the others
|
||||
# TODO: make this more robust
|
||||
lib_list = glob.glob(f"{str(Path.home())}/Pictures/*.photoslibrary")
|
||||
lib_list = glob.glob(f"{str(pathlib.Path.home())}/Pictures/*.photoslibrary")
|
||||
|
||||
# On older OS, may not get all libraries so make sure we get the last one
|
||||
last_lib = get_last_library_path()
|
||||
@@ -290,6 +326,28 @@ def create_path_by_date(dest, dt):
|
||||
return new_dest
|
||||
|
||||
|
||||
def get_preferred_uti_extension(uti):
|
||||
""" get preferred extension for a UTI type
|
||||
uti: UTI str, e.g. 'public.jpeg'
|
||||
returns: preferred extension as str """
|
||||
|
||||
# reference: https://developer.apple.com/documentation/coreservices/1442744-uttypecopypreferredtagwithclass?language=objc
|
||||
|
||||
ext = CoreServices.UTTypeCopyPreferredTagWithClass(
|
||||
uti, CoreServices.kUTTagClassFilenameExtension
|
||||
)
|
||||
return ext
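# Usage sketch (assumption, not part of the original source): for common UTIs the
# preferred extension is the expected one, e.g.
#   get_preferred_uti_extension("public.jpeg")   # -> "jpeg"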
|
||||
|
||||
|
||||
def findfiles(pattern, path_):
|
||||
"""Returns list of filenames from path_ matched by pattern
|
||||
shell pattern. Matching is case-insensitive."""
|
||||
# See: https://gist.github.com/techtonik/5694830
|
||||
|
||||
rule = re.compile(fnmatch.translate(pattern), re.IGNORECASE)
|
||||
return [name for name in os.listdir(path_) if rule.match(name)]
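# Usage sketch (assumption, not part of the original source): case-insensitively find
# Photos libraries in ~/Pictures:
#   findfiles("*.photoslibrary", str(pathlib.Path.home() / "Pictures"))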
|
||||
|
||||
|
||||
# TODO: this doesn't always work, still looking for a way to
|
||||
# force Photos to open the library being operated on
|
||||
# def _open_photos_library_applescript(library_path):
|
||||
@@ -299,8 +357,7 @@ def create_path_by_date(dest, dt):
|
||||
# f"""
|
||||
# on openLibrary
|
||||
# tell application "Photos"
|
||||
# activate
|
||||
# open POSIX file "{library_path}"
|
||||
# open POSIX file "{library_path}"
|
||||
# end tell
|
||||
# end openLibrary
|
||||
# """
|
||||
@@ -316,6 +373,7 @@ def _export_photo_uuid_applescript(
|
||||
edited=False,
|
||||
live_photo=False,
|
||||
timeout=120,
|
||||
burst=False,
|
||||
):
|
||||
""" Export photo to dest path using applescript to control Photos
|
||||
If photo is a live photo, exports both the photo and associated .mov file
|
||||
@@ -327,11 +385,13 @@ def _export_photo_uuid_applescript(
|
||||
If filestem.ext exists, it will be overwritten
|
||||
original: (boolean) if True, export original image; default = True
|
||||
edited: (boolean) if True, export edited photo; default = False
|
||||
will produce an error if image does not have edits/adjustments
|
||||
If photo not edited and edited=True, will still export the original image
|
||||
caller must verify image has been edited
|
||||
*Note*: must be called with either edited or original but not both,
|
||||
will raise error if called with both edited and original = True
|
||||
live_photo: (boolean) if True, export associated .mov live photo; default = False
|
||||
timeout: timeout value in seconds; export will fail if applescript run time exceeds timeout
|
||||
burst: (boolean) set to True if file is a burst image to avoid Photos export error
|
||||
Returns: list of paths to exported file(s) or None if export failed
|
||||
Note: For Live Photos, if edited=True, will export a jpeg but not the movie, even if photo
|
||||
has not been edited. This is due to how Photos Applescript interface works.
|
||||
@@ -342,7 +402,6 @@ def _export_photo_uuid_applescript(
|
||||
"""
|
||||
on export_by_uuid(theUUID, thePath, original, edited, theTimeOut)
|
||||
tell application "Photos"
|
||||
activate
|
||||
set thePath to thePath
|
||||
set theItem to media item id theUUID
|
||||
set theFilename to filename of theItem
|
||||
@@ -390,6 +449,7 @@ def _export_photo_uuid_applescript(
|
||||
# need to find actual filename as sometimes Photos renames JPG to jpeg on export
|
||||
# may be more than one file exported (e.g. if Live Photo, Photos exports both .jpeg and .mov)
|
||||
# TemporaryDirectory will cleanup on return
|
||||
filename_stem = pathlib.Path(filename).stem
|
||||
files = glob.glob(os.path.join(tmpdir.name, "*"))
|
||||
exported_paths = []
|
||||
for fname in files:
|
||||
@@ -398,6 +458,10 @@ def _export_photo_uuid_applescript(
|
||||
# it's the .mov part of live photo but not requested, so don't export
|
||||
logging.debug(f"Skipping live photo file {path}")
|
||||
continue
|
||||
if len(files) > 1 and burst and path.stem != filename_stem:
|
||||
# skip any burst photo that's not the one we asked for
|
||||
logging.debug(f"Skipping burst photo file {path}")
|
||||
continue
|
||||
if filestem:
|
||||
# rename the file based on filestem, keeping original extension
|
||||
dest_new = dest / f"{filestem}{path.suffix}"
|
||||
@@ -443,8 +507,35 @@ def _db_is_locked(dbname):
|
||||
conn.close()
|
||||
logging.debug(f"{dbname} is not locked")
|
||||
locked = False
|
||||
except Exception as e:
|
||||
except:
|
||||
logging.debug(f"{dbname} is locked")
|
||||
locked = True
|
||||
|
||||
return locked
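# Usage sketch (assumption, not part of the original source): callers can check whether
# Photos still holds the database open before reading it directly:
#   if _db_is_locked("/path/to/photos.db"):
#       ...  # wait, retry, or warn the user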
|
||||
|
||||
|
||||
# OSXPHOTOS_XATTR_UUID = "com.osxphotos.uuid"
|
||||
|
||||
# def get_uuid_for_file(filepath):
|
||||
# """ returns UUID associated with an exported file
|
||||
# filepath: path to exported photo
|
||||
# """
|
||||
# attr = xattr.xattr(filepath)
|
||||
# try:
|
||||
# uuid_bytes = attr[OSXPHOTOS_XATTR_UUID]
|
||||
# uuid_str = uuid_bytes.decode('utf-8')
|
||||
# except KeyError:
|
||||
# uuid_str = None
|
||||
# return uuid_str
|
||||
|
||||
# def set_uuid_for_file(filepath, uuid):
|
||||
# """ sets the UUID associated with an exported file
|
||||
# filepath: path to exported photo
|
||||
# uuid: uuid string for photo
|
||||
# """
|
||||
# if not os.path.exists(filepath):
|
||||
# raise FileNotFoundError(f"Missing file: {filepath}")
|
||||
|
||||
# attr = xattr.xattr(filepath)
|
||||
# uuid_bytes = bytes(uuid, 'utf-8')
|
||||
# attr.set(OSXPHOTOS_XATTR_UUID, uuid_bytes)
|
||||
|
||||
@@ -1,23 +1,36 @@
|
||||
altgraph==0.17
|
||||
ansimarkup==1.4.0
|
||||
appdirs==1.4.3
|
||||
astroid==2.2.5
|
||||
atomicwrites==1.3.0
|
||||
attrs==19.1.0
|
||||
better-exceptions-fork==0.2.1.post6
|
||||
# bpylist2==2.0.3;python_version<"3.8"
|
||||
https://github.com/RhetTbull/bpylist/releases/download/v2.0.3/bpylist2-2.0.3.tar.gz#egg=bpylist2;python_version<"3.8"
|
||||
bpylist2==3.0.0;python_version>="3.8"
|
||||
certifi==2019.3.9
|
||||
Click==7.0
|
||||
colorama==0.4.1
|
||||
coverage==4.5.4
|
||||
importlib-metadata==0.18
|
||||
importlib-metadata>=0.18
|
||||
isort==4.3.20
|
||||
lazy-object-proxy==1.4.1
|
||||
loguru==0.2.5
|
||||
macholib==1.14
|
||||
Mako==1.1.1
|
||||
MarkupSafe==1.1.1
|
||||
mccabe==0.6.1
|
||||
modulegraph==0.18
|
||||
more-itertools==7.2.0
|
||||
-e git+https://github.com/RhetTbull/osxphotos.git@0271b8ad9daf8b2fb80ce81e894478370e421379#egg=osxphotos
|
||||
packaging==19.0
|
||||
pathspec==0.7.0
|
||||
pathvalidate==2.2.1
|
||||
pluggy==0.12.0
|
||||
py==1.8.0
|
||||
py2app==0.21
|
||||
Pygments==2.4.2
|
||||
PyInstaller==3.6
|
||||
pyinstaller-setuptools==2019.3
|
||||
pylint==2.3.1
|
||||
pyobjc==6.0.1
|
||||
pyobjc-core==6.0.1
|
||||
@@ -134,13 +147,12 @@ pyobjc-framework-VideoToolbox==6.0.1
|
||||
pyobjc-framework-Vision==6.0.1
|
||||
pyobjc-framework-WebKit==6.0.1
|
||||
pyparsing==2.4.1.1
|
||||
pytest==5.3.1
|
||||
pytest-cov==2.8.1
|
||||
pytest-sugar==0.9.2
|
||||
PyYAML==5.1.2
|
||||
regex==2020.2.20
|
||||
six==1.12.0
|
||||
termcolor==1.1.0
|
||||
toml==0.10.0
|
||||
typed-ast==1.4.1
|
||||
wcwidth==0.1.7
|
||||
wrapt==1.11.1
|
||||
zipp==0.5.2
|
||||
Mako==1.1.1
|
||||
setup.py (46 changed lines)
@@ -3,7 +3,7 @@
|
||||
#
|
||||
# setup.py script for osxphotos
|
||||
#
|
||||
# Copyright (c) 2019 Rhet Turnbull, rturnbull+git@gmail.com
|
||||
# Copyright (c) 2019, 2020 Rhet Turnbull, rturnbull+git@gmail.com
|
||||
# All rights reserved.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
@@ -27,30 +27,50 @@
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
import platform
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
this_directory = os.path.abspath(os.path.dirname(__file__))
|
||||
with open(os.path.join(this_directory, "README.md"), encoding="utf-8") as f:
|
||||
long_description = f.read()
|
||||
# python version as 2-digit float (e.g. 3.6)
|
||||
py_ver = float(".".join(platform.python_version_tuple()[:2]))
|
||||
|
||||
# holds config info read from disk
|
||||
about = {}
|
||||
this_directory = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
# get version info from _version
|
||||
with open(
|
||||
os.path.join(this_directory, "osxphotos", "_version.py"), mode="r", encoding="utf-8"
|
||||
) as f:
|
||||
exec(f.read(), about)
|
||||
|
||||
# read README.md into long_description
|
||||
with open(os.path.join(this_directory, "README.md"), encoding="utf-8") as f:
|
||||
about["long_description"] = f.read()
|
||||
|
||||
# ugly hack to install custom version of bpylist2 needed for Python < 3.8
|
||||
# the stock version of bylist2==2.0.3 causes an error related to
|
||||
# "pkg_resources.ContextualVersionConflict: (pycodestyle 2.3.1..."
|
||||
# PEP 508 no help here as URL-based lookups not allowed in PyPI packages
|
||||
# if you know a better way, PRs welcome!
|
||||
# once I go to 3.8+ required, this won't be necessary as bpylist2 3.0+ solves this issue
|
||||
if py_ver < 3.8:
|
||||
os.system(
|
||||
"python3 -m pip install git+git://github.com/RhetTbull/bpylist2.git#egg=bpylist2"
|
||||
)
|
||||
|
||||
setup(
|
||||
name="osxphotos",
|
||||
version=about["__version__"],
|
||||
description="Manipulate (read-only) Apple's Photos app library on Mac OS X",
|
||||
long_description=long_description,
|
||||
long_description=about["long_description"],
|
||||
long_description_content_type="text/markdown",
|
||||
author="Rhet Turnbull",
|
||||
author_email="rturnbull+git@gmail.com",
|
||||
url="https://github.com/RhetTbull/",
|
||||
project_urls={"GitHub": "https://github.com/RhetTbull/osxphotos"},
|
||||
download_url="https://github.com/RhetTbull/osxphotos",
|
||||
packages=find_packages(exclude=["tests", "examples"]),
|
||||
packages=find_packages(exclude=["tests", "examples", "utils"]),
|
||||
license="License :: OSI Approved :: MIT License",
|
||||
classifiers=[
|
||||
"Development Status :: 4 - Beta",
|
||||
@@ -58,9 +78,19 @@ setup(
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
],
|
||||
install_requires=["pyobjc>=6.0.1", "Click>=7", "PyYAML>=5.1.2", "Mako>=1.1.1"],
|
||||
install_requires=[
|
||||
"pyobjc>=6.0.1",
|
||||
"Click>=7",
|
||||
"PyYAML>=5.1.2",
|
||||
"Mako>=1.1.1",
|
||||
"bpylist2==2.0.3;python_version<'3.8'",
|
||||
"bpylist2==3.0.0;python_version>='3.8'",
|
||||
"pathvalidate==2.2.1",
|
||||
"dataclasses==0.7;python_version<'3.7'",
|
||||
],
|
||||
entry_points={"console_scripts": ["osxphotos=osxphotos.__main__:cli"]},
|
||||
include_package_data=True,
|
||||
)
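# Illustrative note (assumption, not part of the original source): with the PEP 508
# markers above, installing on Python 3.6 pulls in bpylist2==2.0.3 plus the dataclasses
# backport, Python 3.7 pulls in bpylist2==2.0.3 only, and Python 3.8+ pulls in
# bpylist2==3.0.0.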
|
||||
|
||||
@@ -1,14 +1,22 @@
|
||||
# Tests for osxphotos #
|
||||
|
||||
## Running Tests ##
|
||||
Tests require pytest:
|
||||
Tests require pytest and pytest-mock:
|
||||
`pip install pytest`
|
||||
`pip install pytest-mock`
|
||||
|
||||
To run the tests, do the following from the main source folder:
|
||||
`python -m pytest tests/`
|
||||
|
||||
Running the tests this way allows the library to be tested without installing it.
|
||||
|
||||
## Skipped Tests ##
|
||||
A few tests will look for certain environment variables to determine if they should run.
|
||||
|
||||
Some of the export tests rely on photos in my local library and will look for `OSXPHOTOS_TEST_EXPORT=1` to determine if they should run.
|
||||
|
||||
One test for locale does not run on GitHub's automated workflow and will look for `OSXPHOTOS_TEST_LOCALE=1` to determine if it should be run. If you want to run this test, set the environment variable.
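A sketch (not taken from the test suite; the test name is hypothetical) of how such an environment-variable gate can be written with `pytest.mark.skipif`:

    import os
    import pytest

    @pytest.mark.skipif(
        os.environ.get("OSXPHOTOS_TEST_LOCALE") != "1",
        reason="set OSXPHOTOS_TEST_LOCALE=1 to run locale-dependent tests",
    )
    def test_locale_dependent_behavior():
        ...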
|
||||
|
||||
## Attribution ##
|
||||
These tests utilize a test Photos library. The test library is populated with photos from [flickr](https://www.flickr.com). All images used are licensed under Creative Commons 2.0 Attribution [license](https://creativecommons.org/licenses/by/2.0/).
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<key>LithiumMessageTracer</key>
|
||||
<dict>
|
||||
<key>LastReportedDate</key>
|
||||
<date>2019-12-08T16:44:38Z</date>
|
||||
<date>2020-04-17T18:39:50Z</date>
|
||||
</dict>
|
||||
<key>PXPeopleScreenUnlocked</key>
|
||||
<true/>
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>PhotoAnalysisGraphLastBackgroundGraphRebuildJobDate</key>
|
||||
<date>2020-01-22T02:10:26Z</date>
|
||||
<date>2020-04-17T18:40:46Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundMemoryGenerationJobDate</key>
|
||||
<date>2020-01-22T02:10:27Z</date>
|
||||
<date>2020-04-17T18:39:51Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -11,6 +11,6 @@
|
||||
<key>PLLastRevGeoForcedProviderOutOfDateCheckVersionKey</key>
|
||||
<integer>1</integer>
|
||||
<key>PLLastRevGeoVerFileFetchDateKey</key>
|
||||
<date>2020-01-19T17:29:28Z</date>
|
||||
<date>2020-04-17T18:39:52Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>414</integer>
|
||||
<integer>502</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>E3E46F2A-7168-4973-AB3E-5848F80BFC7D</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
<key>HistoricalMarker</key>
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>414</integer>
|
||||
<integer>502</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>E3E46F2A-7168-4973-AB3E-5848F80BFC7D</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
|
||||
@@ -8,8 +8,11 @@
|
||||
<array/>
|
||||
<key>ExpandedSidebarItemIdentifiers</key>
|
||||
<array>
|
||||
<string>obfeGcvoT1auxoh2Tu86OQ</string>
|
||||
<string>TopLevelAlbums</string>
|
||||
<string>TopLevelSlideshows</string>
|
||||
<string>MBS8+gBrQCWQxmcav+C8HQ</string>
|
||||
<string>cHwwVoUiQ8a2nZNXgVsnCA</string>
|
||||
</array>
|
||||
<key>IPXWorkspaceControllerZoomLevelsKey</key>
|
||||
<dict>
|
||||
@@ -23,11 +26,11 @@
|
||||
<key>key</key>
|
||||
<integer>1</integer>
|
||||
<key>lastKnownDisplayName</key>
|
||||
<string>September 28, 2018</string>
|
||||
<string>Pumpkin Farm (1)</string>
|
||||
<key>type</key>
|
||||
<string>album</string>
|
||||
<key>uuid</key>
|
||||
<string>+Ep8CrNRRhea9eVA618FMg</string>
|
||||
<string>AU8Gp8bwRlOvngZFgwXBdg</string>
|
||||
</dict>
|
||||
<key>lastKnownItemCounts</key>
|
||||
<dict>
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>PhotoAnalysisGraphLastBackgroundGraphRebuildJobDate</key>
|
||||
<date>2019-07-28T01:23:52Z</date>
|
||||
<date>2020-04-30T14:09:38Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundMemoryGenerationJobDate</key>
|
||||
<date>2019-07-28T01:23:52Z</date>
|
||||
<date>2020-05-01T04:27:48Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -2,7 +2,11 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>ProcessedInQuiescentState</key>
|
||||
<true/>
|
||||
<key>SuggestedMeIdentifier</key>
|
||||
<string></string>
|
||||
<key>Version</key>
|
||||
<integer>3</integer>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<key>LithiumMessageTracer</key>
|
||||
<dict>
|
||||
<key>LastReportedDate</key>
|
||||
<date>2019-07-27T12:01:15Z</date>
|
||||
<date>2020-05-01T03:34:50Z</date>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -11,6 +11,6 @@
|
||||
<key>PLLastRevGeoForcedProviderOutOfDateCheckVersionKey</key>
|
||||
<integer>1</integer>
|
||||
<key>PLLastRevGeoVerFileFetchDateKey</key>
|
||||
<date>2019-07-26T20:15:18Z</date>
|
||||
<date>2020-04-30T12:51:41Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>615</integer>
|
||||
<integer>670</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>BEA5F0E8-BA6B-4462-8F73-3E53BBE4C943</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
<key>HistoricalMarker</key>
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>615</integer>
|
||||
<integer>670</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>BEA5F0E8-BA6B-4462-8F73-3E53BBE4C943</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
@@ -24,7 +24,7 @@
|
||||
<key>SnapshotCompletedDate</key>
|
||||
<date>2019-07-26T20:15:17Z</date>
|
||||
<key>SnapshotLastValidated</key>
|
||||
<date>2019-07-27T12:01:15Z</date>
|
||||
<date>2020-05-01T03:34:50Z</date>
|
||||
<key>SnapshotTables</key>
|
||||
<dict/>
|
||||
</dict>
|
||||
|
||||
@@ -9,12 +9,14 @@
|
||||
<key>ExpandedSidebarItemIdentifiers</key>
|
||||
<array>
|
||||
<string>TopLevelAlbums</string>
|
||||
<string>QtSnVvTkQ%i2z3hB834M1A</string>
|
||||
<string>TopLevelSlideshows</string>
|
||||
<string>N7eQ4VhfTfeHFp9PPHaJDw</string>
|
||||
</array>
|
||||
<key>IPXWorkspaceControllerZoomLevelsKey</key>
|
||||
<dict>
|
||||
<key>kZoomLevelIdentifierAlbums</key>
|
||||
<integer>10</integer>
|
||||
<integer>5</integer>
|
||||
<key>kZoomLevelIdentifierVersions</key>
|
||||
<integer>7</integer>
|
||||
</dict>
|
||||
@@ -23,11 +25,11 @@
|
||||
<key>key</key>
|
||||
<integer>1</integer>
|
||||
<key>lastKnownDisplayName</key>
|
||||
<string>Test Album (1)</string>
|
||||
<string>Pumpkin Farm (1)</string>
|
||||
<key>type</key>
|
||||
<string>album</string>
|
||||
<key>uuid</key>
|
||||
<string>Uq6qsKihRRSjMHTiD+0Azg</string>
|
||||
<string>xJ8ya3NBRWC24gKhcwwNeQ</string>
|
||||
</dict>
|
||||
<key>lastKnownItemCounts</key>
|
||||
<dict>
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>PhotoAnalysisGraphLastBackgroundGraphRebuildJobDate</key>
|
||||
<date>2020-01-29T06:24:15Z</date>
|
||||
<date>2020-04-25T23:54:43Z</date>
|
||||
<key>PhotoAnalysisGraphLastBackgroundMemoryGenerationJobDate</key>
|
||||
<date>2020-01-29T13:44:20Z</date>
|
||||
<date>2020-04-26T06:26:10Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<key>LithiumMessageTracer</key>
|
||||
<dict>
|
||||
<key>LastReportedDate</key>
|
||||
<date>2020-01-19T14:48:46Z</date>
|
||||
<date>2020-04-17T17:51:16Z</date>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -11,6 +11,6 @@
|
||||
<key>PLLastRevGeoForcedProviderOutOfDateCheckVersionKey</key>
|
||||
<integer>1</integer>
|
||||
<key>PLLastRevGeoVerFileFetchDateKey</key>
|
||||
<date>2020-01-29T06:24:08Z</date>
|
||||
<date>2020-04-25T23:54:29Z</date>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>575</integer>
|
||||
<integer>606</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>D8C4AAA1-3AB6-4A65-BEBD-99CC3E5D433E</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
<key>HistoricalMarker</key>
|
||||
<dict>
|
||||
<key>LastHistoryRowId</key>
|
||||
<integer>575</integer>
|
||||
<integer>606</integer>
|
||||
<key>LibraryBuildTag</key>
|
||||
<string>D8C4AAA1-3AB6-4A65-BEBD-99CC3E5D433E</string>
|
||||
<key>LibrarySchemaVersion</key>
|
||||
@@ -24,7 +24,7 @@
|
||||
<key>SnapshotCompletedDate</key>
|
||||
<date>2019-07-27T13:16:43Z</date>
|
||||
<key>SnapshotLastValidated</key>
|
||||
<date>2020-01-29T06:26:14Z</date>
|
||||
<date>2020-04-25T23:56:35Z</date>
|
||||
<key>SnapshotTables</key>
|
||||
<dict/>
|
||||
</dict>
|
||||
|
||||