__author__ = "Peter Molnar"
__copyright__ = "Copyright 2017-2019, Peter Molnar"
__license__ = "apache-2.0"
__maintainer__ = "Peter Molnar"
__email__ = "mail@petermolnar.net"

import re
import subprocess
import json
import os
import logging

import requests

import keys
import settings

EXIFDATE = re.compile(
    r"^(?P<year>[0-9]{4}):(?P<month>[0-9]{2}):(?P<day>[0-9]{2})\s+"
    r"(?P<time>[0-9]{2}:[0-9]{2}:[0-9]{2})$"
)


class CachedMeta(dict):
    def __init__(self, fpath):
        self.fpath = fpath

    @property
    def cfile(self):
        fname = os.path.basename(self.fpath)
        if fname == "index.md":
            fname = os.path.basename(os.path.dirname(self.fpath))
        return os.path.join(
            settings.tmpdir, "%s.%s.json" % (fname, self.__class__.__name__)
        )

    @property
    def _is_cached(self):
        if os.path.exists(self.cfile):
            mtime = os.path.getmtime(self.fpath)
            ctime = os.path.getmtime(self.cfile)
            if ctime >= mtime:
                return True
        return False

    def _read(self):
        if not self._is_cached:
            self._call_tool()
            self._cache_update()
        else:
            self._cache_read()

    def _cache_update(self):
        with open(self.cfile, "wt") as f:
            logging.debug(
                "writing cached meta file of %s to %s", self.fpath, self.cfile
            )
            f.write(json.dumps(self, indent=4, sort_keys=True))

    def _cache_read(self):
        with open(self.cfile, "rt") as f:
            data = json.loads(f.read())
            for k, v in data.items():
                self[k] = v


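# CachedMeta is meant to be subclassed rather than used on its own: the
# subclass provides _call_tool() to populate the dict and calls self._read()
# from its own __init__, as Exif does below; cache invalidation is purely
# mtime-based (see _is_cached above).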
class Exif(CachedMeta):
    def __init__(self, fpath):
        self.fpath = fpath
        self._read()

    def _call_tool(self):
        """
        Why like this: the trailing # on some of the parameters forces
        exiftool to print plain numeric values, so the latitude / longitude
        parameters can be used and parsed in a sane way.

        If only -json is passed, exiftool returns everything nicely, but in
        its default human-readable format, which would need another round of
        parsing.
        """
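        # The "#" suffix (e.g. "-GPSLatitude#") is what makes exiftool emit a
        # plain signed decimal instead of its default degrees/minutes/seconds
        # string for that tag.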
        cmd = (
            "exiftool",
            "-sort",
            "-json",
            "-MIMEType",
            "-FileType",
            "-FileName",
            "-FileSize#",
            "-ModifyDate",
            "-CreateDate",
            "-DateTimeOriginal",
            "-ImageHeight",
            "-ImageWidth",
            "-Aperture",
            "-FOV",
            "-ISO",
            "-FocalLength",
            "-FNumber",
            "-FocalLengthIn35mmFormat",
            "-ExposureTime",
            "-Model",
            "-GPSLongitude#",
            "-GPSLatitude#",
            "-LensID",
            "-LensSpec",
            "-Lens",
            "-ReleaseDate",
            "-ReleaseTime",  # needed for the DateTimeRelease merge below
            "-Description",
            "-Headline",
            "-HierarchicalSubject",
            "-Copyright",
            "-Artist",
            self.fpath,
        )

        p = subprocess.Popen(
            cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = p.communicate()
        if stderr:
            raise OSError("Error reading EXIF:\n\t%s\n\t%s" % (cmd, stderr))

        exif = json.loads(stdout.decode("utf-8").strip()).pop()
        if "ReleaseDate" in exif and "ReleaseTime" in exif:
            exif["DateTimeRelease"] = "%s %s" % (
                exif.get("ReleaseDate"),
                exif.get("ReleaseTime")[:8],
            )
            del exif["ReleaseDate"]
            del exif["ReleaseTime"]

        for k, v in exif.items():
            self[k] = self.exifdate2rfc(v)

    def exifdate2rfc(self, value):
        """converts an EXIF date string to RFC 3339 format

        :param value: EXIF date (2016:05:01 00:08:24)
        :type value: str
        :return: RFC 3339 string with UTC timezone (2016-05-01T00:08:24+00:00)
        :rtype: str
        """
        if not isinstance(value, str):
            return value
        match = EXIFDATE.match(value)
        if not match:
            return value
        return "%s-%s-%sT%s+00:00" % (
            match.group("year"),
            match.group("month"),
            match.group("day"),
            match.group("time"),
        )
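

if __name__ == "__main__":
    # Minimal usage sketch: it assumes exiftool is installed and on PATH and
    # that settings.tmpdir exists and is writable; the file path comes from
    # the command line and is purely illustrative.
    import sys

    if len(sys.argv) > 1:
        # the first run shells out to exiftool; later runs with an unchanged
        # file are served from the JSON cache in settings.tmpdir
        print(json.dumps(Exif(sys.argv[1]), indent=4, sort_keys=True))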