post-black

This commit is contained in:
Peter Molnar 2019-06-25 23:48:04 +02:00
parent 1c893cf678
commit 678a7f7503
4 changed files with 906 additions and 1162 deletions

109
meta.py
View file

@@ -14,10 +14,11 @@ import keys
import settings
EXIFDATE = re.compile(
r'^(?P<year>[0-9]{4}):(?P<month>[0-9]{2}):(?P<day>[0-9]{2})\s+'
r'(?P<time>[0-9]{2}:[0-9]{2}:[0-9]{2})$'
r"^(?P<year>[0-9]{4}):(?P<month>[0-9]{2}):(?P<day>[0-9]{2})\s+"
r"(?P<time>[0-9]{2}:[0-9]{2}:[0-9]{2})$"
)
class CachedMeta(dict):
def __init__(self, fpath):
self.fpath = fpath
@@ -25,15 +26,11 @@ class CachedMeta(dict):
@property
def cfile(self):
fname = os.path.basename(self.fpath)
if fname == 'index.md':
if fname == "index.md":
fname = os.path.basename(os.path.dirname(self.fpath))
return os.path.join(
settings.tmpdir,
"%s.%s.json" % (
fname,
self.__class__.__name__,
)
settings.tmpdir, "%s.%s.json" % (fname, self.__class__.__name__)
)
@property
@@ -53,16 +50,14 @@ class CachedMeta(dict):
self._cache_read()
def _cache_update(self):
with open(self.cfile, 'wt') as f:
with open(self.cfile, "wt") as f:
logging.debug(
"writing cached meta file of %s to %s",
self.fpath,
self.cfile
"writing cached meta file of %s to %s", self.fpath, self.cfile
)
f.write(json.dumps(self, indent=4, sort_keys=True))
def _cache_read(self):
with open(self.cfile, 'rt') as f:
with open(self.cfile, "rt") as f:
data = json.loads(f.read())
for k, v in data.items():
self[k] = v
@@ -85,57 +80,55 @@ class Exif(CachedMeta):
"""
cmd = (
"exiftool",
'-sort',
'-json',
'-MIMEType',
'-FileType',
'-FileName',
'-FileSize#',
'-ModifyDate',
'-CreateDate',
'-DateTimeOriginal',
'-ImageHeight',
'-ImageWidth',
'-Aperture',
'-FOV',
'-ISO',
'-FocalLength',
'-FNumber',
'-FocalLengthIn35mmFormat',
'-ExposureTime',
'-Model',
'-GPSLongitude#',
'-GPSLatitude#',
'-LensID',
'-LensSpec',
'-Lens',
'-ReleaseDate',
'-Description',
'-Headline',
'-HierarchicalSubject',
'-Copyright',
'-Artist',
self.fpath
"-sort",
"-json",
"-MIMEType",
"-FileType",
"-FileName",
"-FileSize#",
"-ModifyDate",
"-CreateDate",
"-DateTimeOriginal",
"-ImageHeight",
"-ImageWidth",
"-Aperture",
"-FOV",
"-ISO",
"-FocalLength",
"-FNumber",
"-FocalLengthIn35mmFormat",
"-ExposureTime",
"-Model",
"-GPSLongitude#",
"-GPSLatitude#",
"-LensID",
"-LensSpec",
"-Lens",
"-ReleaseDate",
"-Description",
"-Headline",
"-HierarchicalSubject",
"-Copyright",
"-Artist",
self.fpath,
)
p = subprocess.Popen(
cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = p.communicate()
if stderr:
raise OSError("Error reading EXIF:\n\t%s\n\t%s", cmd, stderr)
exif = json.loads(stdout.decode('utf-8').strip()).pop()
if 'ReleaseDate' in exif and 'ReleaseTime' in exif:
exif['DateTimeRelease'] = "%s %s" % (
exif.get('ReleaseDate'), exif.get('ReleaseTime')[:8]
exif = json.loads(stdout.decode("utf-8").strip()).pop()
if "ReleaseDate" in exif and "ReleaseTime" in exif:
exif["DateTimeRelease"] = "%s %s" % (
exif.get("ReleaseDate"),
exif.get("ReleaseTime")[:8],
)
del(exif['ReleaseDate'])
del(exif['ReleaseTime'])
del exif["ReleaseDate"]
del exif["ReleaseTime"]
for k, v in exif.items():
self[k] = self.exifdate2rfc(v)
@@ -154,8 +147,8 @@ class Exif(CachedMeta):
if not match:
return value
return "%s-%s-%sT%s+00:00" % (
match.group('year'),
match.group('month'),
match.group('day'),
match.group('time')
match.group("year"),
match.group("month"),
match.group("day"),
match.group("time"),
)

1455
nasg.py

File diff suppressed because it is too large Load diff

139
pandoc.py
View file

@@ -11,10 +11,11 @@ import hashlib
import os
import settings
class Pandoc(str):
in_format = 'html'
in_format = "html"
in_options = []
out_format = 'plain'
out_format = "plain"
out_options = []
columns = None
@@ -25,18 +26,14 @@ class Pandoc(str):
@property
def cachefile(self):
return os.path.join(
settings.tmpdir,
"%s_%s.pandoc" % (
self.__class__.__name__,
self.hash
)
settings.tmpdir, "%s_%s.pandoc" % (self.__class__.__name__, self.hash)
)
@property
def cache(self):
if not os.path.exists(self.cachefile):
return False
with open(self.cachefile, 'rt') as f:
with open(self.cachefile, "rt") as f:
self.result = f.read()
return True
@@ -44,38 +41,26 @@ class Pandoc(str):
self.source = text
if self.cache:
return
conv_to = '--to=%s' % (self.out_format)
if (len(self.out_options)):
conv_to = '%s+%s' % (
conv_to,
'+'.join(self.out_options)
)
conv_to = "--to=%s" % (self.out_format)
if len(self.out_options):
conv_to = "%s+%s" % (conv_to, "+".join(self.out_options))
conv_from = "--from=%s" % (self.in_format)
if len(self.in_options):
conv_from = "%s+%s" % (conv_from, "+".join(self.in_options))
conv_from = '--from=%s' % (self.in_format)
if (len(self.in_options)):
conv_from = '%s+%s' % (
conv_from,
'+'.join(self.in_options)
)
is_pandoc_version2 = False
try:
version = subprocess.check_output(['pandoc', '-v'])
if version.startswith(b'pandoc 2'):
version = subprocess.check_output(["pandoc", "-v"])
if version.startswith(b"pandoc 2"):
is_pandoc_version2 = True
except OSError:
print("Error: pandoc is not installed!")
cmd = [
'pandoc',
'-o-',
conv_to,
conv_from,
'--no-highlight'
]
cmd = ["pandoc", "-o-", conv_to, conv_from, "--no-highlight"]
if is_pandoc_version2:
# Only pandoc v2 and higher support quiet param
cmd.append('--quiet')
cmd.append("--quiet")
if self.columns:
cmd.append(self.columns)
@@ -89,14 +74,10 @@ class Pandoc(str):
stdout, stderr = p.communicate(input=text.encode())
if stderr:
logging.warning(
"Error during pandoc covert:\n\t%s\n\t%s",
cmd,
stderr
)
r = stdout.decode('utf-8').strip()
logging.warning("Error during pandoc covert:\n\t%s\n\t%s", cmd, stderr)
r = stdout.decode("utf-8").strip()
self.result = r
with open(self.cachefile, 'wt') as f:
with open(self.cachefile, "wt") as f:
f.write(self.result)
def __str__(self):
@@ -107,65 +88,65 @@
class PandocMD2HTML(Pandoc):
in_format = 'markdown'
in_format = "markdown"
in_options = [
'footnotes',
'pipe_tables',
'strikeout',
"footnotes",
"pipe_tables",
"strikeout",
# 'superscript',
# 'subscript',
'raw_html',
'definition_lists',
'backtick_code_blocks',
'fenced_code_attributes',
'shortcut_reference_links',
'lists_without_preceding_blankline',
'autolink_bare_uris',
"raw_html",
"definition_lists",
"backtick_code_blocks",
"fenced_code_attributes",
"shortcut_reference_links",
"lists_without_preceding_blankline",
"autolink_bare_uris",
]
out_format = 'html5'
out_format = "html5"
out_options = []
class PandocHTML2MD(Pandoc):
in_format = 'html'
in_format = "html"
in_options = []
out_format = 'markdown'
out_format = "markdown"
out_options = [
'footnotes',
'pipe_tables',
'strikeout',
'raw_html',
'definition_lists',
'backtick_code_blocks',
'fenced_code_attributes',
'shortcut_reference_links',
'lists_without_preceding_blankline',
'autolink_bare_uris',
"footnotes",
"pipe_tables",
"strikeout",
"raw_html",
"definition_lists",
"backtick_code_blocks",
"fenced_code_attributes",
"shortcut_reference_links",
"lists_without_preceding_blankline",
"autolink_bare_uris",
]
class PandocMD2TXT(Pandoc):
in_format = 'markdown'
in_format = "markdown"
in_options = [
'footnotes',
'pipe_tables',
'strikeout',
'raw_html',
'definition_lists',
'backtick_code_blocks',
'fenced_code_attributes',
'shortcut_reference_links',
'lists_without_preceding_blankline',
'autolink_bare_uris',
"footnotes",
"pipe_tables",
"strikeout",
"raw_html",
"definition_lists",
"backtick_code_blocks",
"fenced_code_attributes",
"shortcut_reference_links",
"lists_without_preceding_blankline",
"autolink_bare_uris",
]
out_format = 'plain'
out_format = "plain"
out_options = []
columns = '--columns=80'
columns = "--columns=80"
class PandocHTML2TXT(Pandoc):
in_format = 'html'
in_format = "html"
in_options = []
out_format = 'plain'
out_format = "plain"
out_options = []
columns = '--columns=80'
columns = "--columns=80"

View file

@@ -17,231 +17,220 @@ class struct(dict):
__delattr__ = dict.__delitem__
base = os.path.abspath(os.path.expanduser('~/Projects/petermolnar.net'))
syncserver = 'liveserver:/web/petermolnar.net'
base = os.path.abspath(os.path.expanduser("~/Projects/petermolnar.net"))
syncserver = "liveserver:/web/petermolnar.net"
pagination = 42
notinfeed = ['note']
flat = ['article', 'journal']
displaydate = 'YYYY-MM-DD HH:mm'
notinfeed = ["note"]
flat = ["article", "journal"]
displaydate = "YYYY-MM-DD HH:mm"
mementostartime = 1561192582
licence = struct({
'article': 'CC-BY-4.0',
'journal': 'CC-BY-NC-4.0',
'_default': 'CC-BY-NC-ND-4.0'
})
licence = struct(
{"article": "CC-BY-4.0", "journal": "CC-BY-NC-4.0", "_default": "CC-BY-NC-ND-4.0"}
)
author = struct({
"@context": "http://schema.org",
"@type": "Person",
"image": "https://petermolnar.net/favicon.jpg",
"email": "mail@petermolnar.net",
"url": "https://petermolnar.net/",
"name": "Peter Molnar"
})
site = struct({
"@context": "http://schema.org",
"@type": "WebSite",
"headline": "Peter Molnar",
"url": "https://petermolnar.net",
"name": "petermolnar.net",
"image": "https://petermolnar.net/favicon.ico",
"license": "https://spdx.org/licenses/%s.html" % (licence['_default']),
"sameAs": [
],
"author": {
author = struct(
{
"@context": "http://schema.org",
"@type": "Person",
"image": "https://petermolnar.net/favicon.jpg",
"email": "mail@petermolnar.net",
"url": "https://petermolnar.net/",
"name": "Peter Molnar",
"sameAs": [
"https://github.com/petermolnar",
"https://petermolnar.net/cv.html",
"xmpp:mail@petermolnar.net",
"https://wa.me/447592011721",
"https://t.me/petermolnar",
"https://twitter.com/petermolnar"
],
"follows": "https://petermolnar.net/following.opml"
},
"publisher": {
"@context": "http://schema.org",
"@type": "Organization",
"logo": {
"@context": "http://schema.org",
"@type": "ImageObject",
"url": "https://petermolnar.net/favicon.jpg"
},
"url": "https://petermolnar.net/",
"name": "petermolnar.net",
"email": "webmaster@petermolnar.net"
},
"potentialAction": [
{
"@context": "http://schema.org",
"@type": "SearchAction",
"target": "https://petermolnar.net/search.php?q={q}",
"query-input": "required name=q",
"url": "https://petermolnar.net/search.php"
},
{
"@context": "http://schema.org",
"@type": "FollowAction",
"url": "https://petermolnar.net/follow/",
"name": "follow"
},
{
"@context": "http://schema.org",
"@type": "DonateAction",
"description": "Monzo",
"name": "monzo",
"url": "https://monzo.me/petermolnar/",
"recipient": author
},
{
"@context": "http://schema.org",
"@type": "DonateAction",
"description": "Paypal",
"name": "paypal",
"url": "https://paypal.me/petermolnar/",
"recipient": author
}
]
})
menu = {
'home': {
'url': '%s/' % site['url'],
'text': 'home',
},
'photo': {
'url': '%s/category/photo/' % site['url'],
'text': 'photos',
},
'journal': {
'url': '%s/category/journal/' % site['url'],
'text': 'journal',
},
'article': {
'url': '%s/category/article/' % site['url'],
'text': 'IT',
},
'note': {
'url': '%s/category/note/' % site['url'],
'text': 'notes'
}
}
)
meta = struct({
'webmention': 'https://webmention.io/petermolnar.net/webmention',
'pingback': 'https://webmention.io/petermolnar.net/xmlrpc',
'hub': 'https://petermolnar.superfeedr.com/',
'authorization_endpoint': 'https://indieauth.com/auth',
'token_endpoint': 'https://tokens.indieauth.com/token',
'micropub': 'https://petermolnar.net/micropub.php',
#'microsub': 'https://aperture.p3k.io/microsub/83'
})
site = struct(
{
"@context": "http://schema.org",
"@type": "WebSite",
"headline": "Peter Molnar",
"url": "https://petermolnar.net",
"name": "petermolnar.net",
"image": "https://petermolnar.net/favicon.ico",
"license": "https://spdx.org/licenses/%s.html" % (licence["_default"]),
"sameAs": [],
"author": {
"@context": "http://schema.org",
"@type": "Person",
"image": "https://petermolnar.net/favicon.jpg",
"email": "mail@petermolnar.net",
"url": "https://petermolnar.net/",
"name": "Peter Molnar",
"sameAs": [
"https://github.com/petermolnar",
"https://petermolnar.net/cv.html",
"xmpp:mail@petermolnar.net",
"https://wa.me/447592011721",
"https://t.me/petermolnar",
"https://twitter.com/petermolnar",
],
"follows": "https://petermolnar.net/following.opml",
},
"publisher": {
"@context": "http://schema.org",
"@type": "Organization",
"logo": {
"@context": "http://schema.org",
"@type": "ImageObject",
"url": "https://petermolnar.net/favicon.jpg",
},
"url": "https://petermolnar.net/",
"name": "petermolnar.net",
"email": "webmaster@petermolnar.net",
},
"potentialAction": [
{
"@context": "http://schema.org",
"@type": "SearchAction",
"target": "https://petermolnar.net/search.php?q={q}",
"query-input": "required name=q",
"url": "https://petermolnar.net/search.php",
},
{
"@context": "http://schema.org",
"@type": "FollowAction",
"url": "https://petermolnar.net/follow/",
"name": "follow",
},
{
"@context": "http://schema.org",
"@type": "DonateAction",
"description": "Monzo",
"name": "monzo",
"url": "https://monzo.me/petermolnar/",
"recipient": author,
},
{
"@context": "http://schema.org",
"@type": "DonateAction",
"description": "Paypal",
"name": "paypal",
"url": "https://paypal.me/petermolnar/",
"recipient": author,
},
],
}
)
paths = struct({
'content': os.path.join(base, 'content'),
'tmpl': os.path.join(base, 'nasg', 'templates'),
'watermark': os.path.join(base, 'nasg', 'templates', 'watermark.png'),
'build': os.path.join(base, 'www'),
'queue': os.path.join(base, 'queue'),
'remotewww': 'web',
'remotequeue': 'queue',
'micropub': os.path.join(base, 'content', 'note'),
'home': os.path.join(base, 'content', 'home', 'index.md'),
'category': 'category',
'feed': 'feed'
})
filenames = struct({
'rss': 'index.xml',
'atom': 'atom.xml',
'json': 'index.json',
'md': 'index.md',
'txt': 'index.txt',
'html': 'index.html',
'gopher': 'gophermap',
'oembed_xml': 'oembed.xml',
'oembed_json': 'oembed.json',
'memento': 'memento.html',
'sitemap': 'sitemap.xml'
})
menu = stuct(
{
"home": {"url": "%s/" % site["url"], "text": "home"},
"photo": {"url": "%s/category/photo/" % site["url"], "text": "photos"},
"journal": {"url": "%s/category/journal/" % site["url"], "text": "journal"},
"article": {"url": "%s/category/article/" % site["url"], "text": "IT"},
"note": {"url": "%s/category/note/" % site["url"], "text": "notes"},
}
)
datignore = [
'.git',
'.dat',
'**.php'
]
meta = struct(
{
"webmention": "https://webmention.io/petermolnar.net/webmention",
"pingback": "https://webmention.io/petermolnar.net/xmlrpc",
"hub": "https://petermolnar.superfeedr.com/",
"authorization_endpoint": "https://indieauth.com/auth",
"token_endpoint": "https://tokens.indieauth.com/token",
"micropub": "https://petermolnar.net/micropub.php",
#'microsub': 'https://aperture.p3k.io/microsub/83'
}
)
photo = struct({
're_author': re.compile(r'(?:P[eé]ter Moln[aá]r)|(?:Moln[aá]r P[eé]ter)|(?:petermolnar\.(?:eu|net))'),
'default': 720,
'sizes': {
#90 = s
#360 = m
720: '',
1280: '_b',
},
'earlyyears': 2014
})
paths = struct(
{
"content": os.path.join(base, "content"),
"tmpl": os.path.join(base, "nasg", "templates"),
"watermark": os.path.join(base, "nasg", "templates", "watermark.png"),
"build": os.path.join(base, "www"),
"queue": os.path.join(base, "queue"),
"remotewww": "web",
"remotequeue": "queue",
"micropub": os.path.join(base, "content", "note"),
"home": os.path.join(base, "content", "home", "index.md"),
"category": "category",
"feed": "feed",
}
)
#symlinks = {
#'files/a-view-from-barbican-1280x720.jpg': 'a-view-from-barbican/a-view-from-barbican_b.jpg',
#'files/hills_from_beachy_head-540x226.jpg': 'hills-from-beachy-head/hills-from-beachy-head.jpg',
#'files/seven_sisters_from_beachy_head-540x304.jpg': 'seven-sisters-from-beachy-head/seven-sisters-from-beachy-head.jpg',
#'files/the_countryside-540x304.jpg': 'the-countryside/the-countryside.jpg',
#'files/MGP0538-540x358.jpg': '',
#'files/IMGP0539-540x358.jpg': '',
#'files/IMGP0538-540x358.jpg': '',
#}
filenames = struct(
{
"rss": "index.xml",
"atom": "atom.xml",
"json": "index.json",
"md": "index.md",
"txt": "index.txt",
"html": "index.html",
"gopher": "gophermap",
"oembed_xml": "oembed.xml",
"oembed_json": "oembed.json",
"memento": "memento.html",
"sitemap": "sitemap.xml",
}
)
tmpdir = os.path.join(gettempdir(),'nasg')
datignore = [".git", ".dat", "**.php"]
photo = struct(
{
"re_author": re.compile(
r"(?:P[eé]ter Moln[aá]r)|(?:Moln[aá]r P[eé]ter)|(?:petermolnar\.(?:eu|net))"
),
"default": 720,
"sizes": {
# 90 = s
# 360 = m
720: "",
1280: "_b",
},
"earlyyears": 2014,
}
)
# symlinks = {
#'files/a-view-from-barbican-1280x720.jpg': 'a-view-from-barbican/a-view-from-barbican_b.jpg',
#'files/hills_from_beachy_head-540x226.jpg': 'hills-from-beachy-head/hills-from-beachy-head.jpg',
#'files/seven_sisters_from_beachy_head-540x304.jpg': 'seven-sisters-from-beachy-head/seven-sisters-from-beachy-head.jpg',
#'files/the_countryside-540x304.jpg': 'the-countryside/the-countryside.jpg',
#'files/MGP0538-540x358.jpg': '',
#'files/IMGP0539-540x358.jpg': '',
#'files/IMGP0538-540x358.jpg': '',
# }
tmpdir = os.path.join(gettempdir(), "nasg")
if not os.path.isdir(tmpdir):
os.makedirs(tmpdir)
_parser = argparse.ArgumentParser(description='Parameters for NASG')
_parser = argparse.ArgumentParser(description="Parameters for NASG")
_booleanparams = {
'regenerate': 'force (re)downsizing images',
'force': 'force (re)rendering HTML',
'debug': 'set logging to debug level',
'quiet': 'show only errors',
'offline': 'offline mode - no syncing, no querying services, etc.',
'noping': 'make dummy webmention entries and don\'t really send them',
'noservices': 'skip querying any service but do sync the website',
'memento': 'try to fetch mementos from archive.org'
"regenerate": "force (re)downsizing images",
"force": "force (re)rendering HTML",
"debug": "set logging to debug level",
"quiet": "show only errors",
"offline": "offline mode - no syncing, no querying services, etc.",
"noping": "make dummy webmention entries and don't really send them",
"noservices": "skip querying any service but do sync the website",
"memento": "try to fetch mementos from archive.org",
}
for k, v in _booleanparams.items():
_parser.add_argument(
'--%s' % (k),
action='store_true',
default=False,
help=v
)
_parser.add_argument("--%s" % (k), action="store_true", default=False, help=v)
args = vars(_parser.parse_args())
if args.get('debug', False):
if args.get("debug", False):
loglevel = 10
elif args.get('quiet', False):
elif args.get("quiet", False):
loglevel = 40
else:
loglevel = 20
logger = logging.getLogger('NASG')
logger = logging.getLogger("NASG")
logger.setLevel(loglevel)
console_handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
logging.getLogger('asyncio').setLevel(loglevel)
logging.getLogger("asyncio").setLevel(loglevel)