re-added enclosure support for the feed and webmention.io parsing for new comments before moving into rendering
This commit is contained in:
parent 864bb54496
commit 1c17232a6a
2 changed files with 102 additions and 11 deletions
.gitignore (vendored): 2 changes

@@ -5,3 +5,5 @@ _scratch
 keys.py
 nasg.proj
+.idea
+run
nasg.py: 111 changes
@@ -28,8 +28,10 @@ import markdown
 from feedgen.feed import FeedGenerator
 from bleach import clean
 from emoji import UNICODE_EMOJI
+import requests
 import exiftool
 import settings
+import keys

 from pprint import pprint
@@ -203,6 +205,15 @@ class Singular(MarkdownDoc):
         self.category = os.path.basename(os.path.dirname(n))
         self.mtime = os.path.getmtime(self.fpath)

+    @property
+    def ctime(self):
+        ret = self.mtime
+        if len(self.comments):
+            ctime = int(sorted(self.comments.keys())[-1])
+            if ctime > self.mtime:
+                ret = ctime
+        return ret
+
     @property
     @cached()
     def files(self):
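For illustration only, not part of the commit: a standalone sketch of what the new ctime property does, assuming the comment keys sort as integer timestamps; the function name and the example path are made up.

import os

def effective_change_time(index_md_path, comment_timestamps):
    # start from the post's own modification time...
    ret = os.path.getmtime(index_md_path)
    if comment_timestamps:
        # ...and bump it to the newest comment timestamp when that is more recent
        newest = int(sorted(comment_timestamps)[-1])
        if newest > ret:
            ret = newest
    return ret

# e.g. effective_change_time('content/photo/sunset/index.md', [1548000000, 1549000000])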
@@ -224,7 +235,7 @@ class Singular(MarkdownDoc):
         An dict of Comment objects keyed with their path, populated from the
         same directory level as the Singular objects
         """
-        comments = OrderedDict()
+        comments = {}
         files = [
             k
             for k in glob.glob(os.path.join(os.path.dirname(self.fpath), '*.md'))
@@ -272,11 +283,25 @@ class Singular(MarkdownDoc):
         directory - so, it's slug -, and that that image believes it's a a
         photo.
         """
+        if len(self.images) != 1:
+            return False
+        photo = next(iter(self.images.values()))
         maybe = self.fpath.replace("index.md", "%s.jpg" % (self.name))
-        if maybe in self.images and self.images[maybe].is_photo:
+        if photo.fpath == maybe:
             return True
         return False

+    @property
+    def enclosure(self):
+        if not self.is_photo:
+            return False
+        photo = next(iter(self.images.values()))
+        return {
+            'mime': photo.mime_type,
+            'size': photo.mime_size,
+            'url': photo.href
+        }
+
     @property
     def summary(self):
         return self.meta.get('summary', '')
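For illustration only, not part of the commit: a sketch of the single-image rule that is_photo now applies; the helper name and the content/photo/sunset layout are invented for the example.

import os

def looks_like_photo_post(index_md_path, image_paths):
    # exactly one image, and it has to be "<post name>.jpg" next to index.md
    if len(image_paths) != 1:
        return False
    name = os.path.basename(os.path.dirname(index_md_path))
    expected = index_md_path.replace("index.md", "%s.jpg" % name)
    return image_paths[0] == expected

# looks_like_photo_post('content/photo/sunset/index.md',
#                       ['content/photo/sunset/sunset.jpg'])  -> True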
@@ -395,7 +420,7 @@ class Singular(MarkdownDoc):
     @property
     @cached()
     def tmplvars(self):
-        return {
+        v = {
             'title': self.title,
             'category': self.category,
             'lang': self.lang,
@@ -413,6 +438,9 @@ class Singular(MarkdownDoc):
             'syndicate': self.syndicate,
             'url': self.url,
         }
+        if (self.enclosure):
+            v.update({'enclosure': self.enclosure})
+        return v

     @property
     def template(self):
@@ -435,7 +463,7 @@ class Singular(MarkdownDoc):
             return False
         elif not os.path.exists(self.renderfile):
             return False
-        elif self.mtime > os.path.getmtime(self.renderfile):
+        elif self.ctime > os.path.getmtime(self.renderfile):
             return False
         else:
             return True
@@ -982,13 +1010,14 @@ class Category(dict):
                 settings.author.get('name'),
                 dt.format('YYYY')
             ))
-            # if p.get('enclosure'):
-                #enclosure = p.get('enclosure')
-                # fe.enclosure(
-                    # enclosure.get('url'),
-                    #"%d" % enclosure.get('size'),
-                    # enclosure.get('mime')
-                # )
+            if 'enclosure' in post:
+                enc = post.get('enclosure')
+                logging.info("enclosure found:", enc)
+                fe.enclosure(
+                    enc.get('url'),
+                    "%d" % enc.get('size'),
+                    enc.get('mime')
+                )
         atom = os.path.join(dirname, 'index.xml')
         with open(atom, 'wb') as f:
             logging.info('writing file: %s', atom)
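For illustration only, not part of the commit: a minimal, self-contained feedgen sketch of the fe.enclosure(url, length, type) call used above; the 'photo' category, the example.net URLs and the size are made up, and the RSS serialization is printed only because the enclosure should show up there as an enclosure element, matching the 'url', 'size' and 'mime' keys of the enclosure dict built earlier.

from feedgen.feed import FeedGenerator

fg = FeedGenerator()
fg.title('photo')
fg.link(href='https://example.net/photo/', rel='alternate')
fg.description('posts in the photo category')

fe = fg.add_entry()
fe.title('sunset')
fe.link(href='https://example.net/photo/sunset/')
# (url, length, type), i.e. the 'url', 'size' and 'mime' values of the post
fe.enclosure('https://example.net/photo/sunset/sunset.jpg', '123456', 'image/jpeg')

print(fg.rss_str(pretty=True).decode('utf-8'))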
@@ -1139,8 +1168,68 @@ class Search(object):
             f.write(r)


+def find_newest_comment():
+    newest = 0
+    content = settings.paths.get('content')
+    for e in glob.glob(os.path.join(content, '*', '*', '*.md')):
+        if os.path.basename(e) == 'index.md':
+            continue
+        mtime = os.path.getmtime(e)
+        if mtime > newest:
+            newest = mtime
+    return newest
+
+
+def mkcomment(webmention):
+    dt = arrow.get(webmention.get('data').get('published'))
+    fdir = os.path.join(
+        settings.paths.get('content'),
+        webmention.get('target').strip('/').split('/')[-1]
+    )
+    fpath = os.path.join(
+        fdir,
+        "%d-%s.md" % (
+            dt.timestamp,
+            slugify(
+                re.sub(r"^https?://(?:www)?", "", url),
+                only_ascii=True,
+                lower=True
+            )[:200]
+        )
+    )
+
+    fm = frontmatter.loads('')
+    fm.metadata = {
+        'author': webmention.get('data').get('author'),
+        'date': dt.format(settings.dateformat.get('iso')),
+        'source': webmention.get('source'),
+        'target': webmention.get('target'),
+        'type': webmention.get('activity').get('type')
+    }
+    fm.content = webmention.get('data').get('content')
+    with open(fpath, 'wt') as f:
+        logging.info("saving webmention to %s", fpath)
+        f.write(frontmatter.dumps(fm))
+
+
 def make():
     start = int(round(time.time() * 1000))
+
+    newest = arrow.get(find_newest_comment())
+    wio_params = {
+        'jsonp': '',
+        'token': '%s' % (keys.webmentionio.get('token')),
+        'since': '%s' % newest.format(settings.dateformat.get('iso')),
+        'domain': '%s' % (keys.webmentionio.get('domain'))
+    }
+    wio_url = "https://webmention.io/api/mentions"
+    webmentions = requests.get(wio_url, params=wio_params)
+    try:
+        webmentions = webmentions.json()
+        for webmention in webmentions.get('links'):
+            mkcomment(webmention)
+    except ValueError as e:
+        pass
+
     content = settings.paths.get('content')
     worker = AsyncWorker()
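For illustration only, not part of the commit: roughly how one entry of webmention.io's "links" array becomes a front-matter comment file via mkcomment() above; only the fields the new code reads are shown, and every value (URLs, author, text, date) is invented.

import arrow
import frontmatter

link = {
    'source': 'https://example.com/a-reply',
    'target': 'https://example.net/photo/sunset/',
    'activity': {'type': 'reply'},
    'data': {
        'author': {'name': 'Example Author', 'url': 'https://example.com/'},
        'published': '2019-01-01T12:00:00+00:00',
        'content': 'Nice photo!'
    }
}

dt = arrow.get(link['data']['published'])
fm = frontmatter.loads('')
fm.metadata = {
    'author': link['data']['author'],
    'date': dt.format('YYYY-MM-DDTHH:mm:ssZZ'),
    'source': link['source'],
    'target': link['target'],
    'type': link['activity']['type']
}
fm.content = link['data']['content']
# prints YAML front matter followed by the comment text, which is the shape
# of the .md file written next to the target post
print(frontmatter.dumps(fm))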