making yearly pagination a bit more sensible and cleaner; adding a trigger to only rebuild old archives if old posts' mtime has changed
parent 10045afa09
commit 4cb7623355
4 changed files with 227 additions and 168 deletions
nasg.py | 343
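Before the diff itself, a minimal, self-contained sketch of the trigger this commit adds (hedged: helper names like maybe_render and post_mtimes are illustrative, not from the commit; the real logic lives in Category.is_uptodate() and Category.newest() in nasg.py below). The idea: an archive page is rebuilt only if its rendered file is missing or older than the newest mtime of the posts that belong to it.

import os

def is_uptodate(fpath, newest_post_mtime):
    # the rendered file must exist and be at least as new as the newest post
    if not os.path.exists(fpath):
        return False
    return os.path.getmtime(fpath) >= newest_post_mtime

def maybe_render(fpath, post_mtimes, render):
    # post_mtimes: mtimes of the posts belonging to this archive page
    newest = max(post_mtimes)
    if is_uptodate(fpath, newest):
        return False   # nothing changed, leave the old archive alone
    render(fpath)      # rebuild only the outdated archive
    return True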
@@ -19,6 +19,7 @@ from shutil import copy2 as cp
 from math import ceil
 from urllib.parse import urlparse
 from collections import OrderedDict, namedtuple
+import logging
 import arrow
 import langdetect
 import wand.image
@@ -36,6 +37,8 @@ import keys
 
 from pprint import pprint
 
+logger = logging.getLogger('NASG')
+
 MarkdownImage = namedtuple(
     'MarkdownImage',
     ['match', 'alt', 'fname', 'title', 'css']
@@ -64,6 +67,18 @@ RE_PRECODE = re.compile(
     r'<pre class="([^"]+)"><code>'
 )
 
+def writepath(fpath, content, mtime=0):
+    d = os.path.dirname(fpath)
+    if not os.path.isdir(d):
+        logger.debug('creating directory tree %s', d)
+        os.makedirs(d)
+    with open(fpath, 'wt') as f:
+        logger.info('writing file %s', fpath)
+        f.write(content)
+    # TODO
+    #if (mtime > 0):
+
+
 #def relurl(url,base=settings.site.get('url')):
     #url =urlparse(url)
     #base = urlparse(base)
@@ -118,11 +133,7 @@ class Webmention(object):
         return False
 
     async def save(self, content):
-        d = os.path.dirname(self.fpath)
-        if not os.path.isdir(d):
-            os.makedirs(d)
-        with open(self.fpath, 'wt') as f:
-            f.write(content)
+        writepath(self.fpath, content)
 
     async def send(self):
         if self.exists:
@@ -134,13 +145,13 @@ class Webmention(object):
             'target': '%s' % (self.target)
         }
         r = requests.post(telegraph_url, data=telegraph_params)
-        settings.logger.info(
+        logger.info(
            "sent webmention to telegraph from %s to %s",
            self.source,
            self.target
        )
        if r.status_code not in [200, 201, 202]:
-            settings.logger.error('sending failed: %s %s', r.status_code, r.text)
+            logger.error('sending failed: %s %s', r.status_code, r.text)
        else:
            await self.save(r.text)
 
@@ -149,7 +160,7 @@ class MarkdownDoc(object):
     @cached_property
     def _parsed(self):
         with open(self.fpath, mode='rt') as f:
-            settings.logger.debug('parsing YAML+MD file %s', self.fpath)
+            logger.debug('parsing YAML+MD file %s', self.fpath)
             meta, txt = frontmatter.parse(f.read())
         return(meta, txt)
 
@@ -571,19 +582,17 @@ class Singular(MarkdownDoc):
     def template(self):
         return "%s.j2.html" % (self.__class__.__name__)
 
-    @cached_property
+    @property
     def renderdir(self):
-        d = os.path.join(
-            settings.paths.get('build'),
-            self.name
-        )
-        if not os.path.isdir(d):
-            os.makedirs(d)
-        return d
+        return os.path.dirname(self.renderfile)
 
     @property
     def renderfile(self):
-        return os.path.join(self.renderdir, 'index.html')
+        return os.path.join(
+            settings.paths.get('build'),
+            self.name,
+            'index.html'
+        )
 
     @property
     def exists(self):
@@ -610,7 +619,7 @@ class Singular(MarkdownDoc):
         #fm.metadata = self.meta
         #fm.content = self.content
         #with open(fpath, 'wt') as f:
-            #settings.logger.info("updating %s", fpath)
+            #logger.info("updating %s", fpath)
             #f.write(frontmatter.dumps(fm))
 
     async def copyfiles(self):
@@ -631,13 +640,13 @@ class Singular(MarkdownDoc):
             )
             if os.path.exists(t) and os.path.getmtime(f) <= os.path.getmtime(t):
                 continue
-            settings.logger.info("copying '%s' to '%s'", f, t)
+            logger.info("copying '%s' to '%s'", f, t)
             cp(f, t)
 
     async def render(self):
         if self.exists:
             return
-        settings.logger.info("rendering %s", self.name)
+        logger.info("rendering %s", self.name)
         r = J2.get_template(self.template).render({
             'post': self.tmplvars,
             'site': settings.site,
@@ -646,17 +655,12 @@ class Singular(MarkdownDoc):
             'licence': settings.licence,
             'tips': settings.tips,
         })
-        if not os.path.isdir(self.renderdir):
-            settings.logger.info("creating directory: %s", self.renderdir)
-            os.makedirs(self.renderdir)
-        with open(self.renderfile, 'wt') as f:
-            settings.logger.info("saving to %s", self.renderfile)
-            f.write(r)
+        writepath(self.renderfile, r)
 
 
 class WebImage(object):
     def __init__(self, fpath, mdimg, parent):
-        settings.logger.debug("loading image: %s", fpath)
+        logger.debug("loading image: %s", fpath)
         self.mdimg = mdimg
         self.fpath = fpath
         self.parent = parent
@@ -868,7 +872,7 @@ class WebImage(object):
         img = self._maybe_watermark(img)
         for size, resized in self.resized_images:
             if not resized.exists or settings.args.get('regenerate'):
-                settings.logger.info(
+                logger.info(
                     "resizing image: %s to size %d",
                     os.path.basename(self.fpath),
                     size
@@ -985,7 +989,7 @@ class WebImage(object):
 
         # this is to make sure pjpeg happens
         with open(self.fpath, 'wb') as f:
-            settings.logger.info("writing %s", self.fpath)
+            logger.info("writing %s", self.fpath)
             thumb.save(file=f)
 
 
@@ -1129,9 +1133,7 @@ class Search(PHPFile):
             'licence': settings.licence,
             'tips': settings.tips,
         })
-        with open(self.renderfile, 'wt') as f:
-            settings.logger.info("rendering to %s", self.renderfile)
-            f.write(r)
+        writepath(self.renderfile, r)
 
 
 class IndexPHP(PHPFile):
@@ -1169,7 +1171,7 @@ class IndexPHP(PHPFile):
             'redirects': self.redirect
         })
         with open(self.renderfile, 'wt') as f:
-            settings.logger.info("rendering to %s", self.renderfile)
+            logger.info("rendering to %s", self.renderfile)
             f.write(r)
 
 
@@ -1192,7 +1194,7 @@ class WebhookPHP(PHPFile):
             'zapier': keys.zapier,
         })
         with open(self.renderfile, 'wt') as f:
-            settings.logger.info("rendering to %s", self.renderfile)
+            logger.info("rendering to %s", self.renderfile)
             f.write(r)
 
 
@@ -1243,7 +1245,7 @@ class Category(dict):
         return url
 
     @property
-    def feed(self):
+    def feedurl(self):
         return "%sfeed/" % (self.url)
 
     @property
@@ -1251,7 +1253,7 @@ class Category(dict):
         return "%s.j2.html" % (self.__class__.__name__)
 
     @property
-    def renderdir(self):
+    def dpath(self):
         if len(self.name):
             return os.path.join(
                 settings.paths.get('build'),
@@ -1262,9 +1264,14 @@ class Category(dict):
         return settings.paths.get('build')
 
     def navlink(self, ts):
+        label = ts.format(self.trange)
+        if arrow.utcnow().format(self.trange) == label:
+            url = self.url
+        else:
+            url = "%s%s/" % (self.url, label)
         return {
-            'url': "%s%s/" % (self.url, ts.format(self.trange)),
-            'label': ts.format(self.trange)
+            'url': url,
+            'label': label
         }
 
     def tmplvars(self, posts=[], c=False, p=False, n=False):
@@ -1288,12 +1295,12 @@ class Category(dict):
                 'name': self.name,
                 'display': self.display,
                 'url': self.url,
-                'feed': "%s%s/" % (self.url, 'feed'),
-                #'jsonfeed': "%s%s/index.json" % (self.url, 'feed'),
+                'feed': self.feedurl,
                 'title': self.title,
                 'current': c,
                 'previous': p,
                 'next': n,
+                'currentyear': arrow.utcnow().format('YYYY')
             },
             'posts': posts,
         }
@@ -1302,35 +1309,54 @@ class Category(dict):
     def mtime(self):
         return arrow.get(self[self.sortedkeys[0]].published).timestamp
 
+    # @property
+    # def exists(self):
+        # if settings.args.get('force'):
+            # return False
+        # ismissing = False
+        # for f in [
+            # os.path.join(self.renderdir, 'feed', 'index.xml'),
+        # ]:
+            # if not os.path.exists(f):
+                # ismissing = True
+            # elif self.mtime > os.path.getmtime(f):
+                # ismissing = True
+        # if ismissing:
+            # return False
+        # else:
+            # return True
+
     @property
-    def exists(self):
-        if settings.args.get('force'):
-            return False
-        ismissing = False
-        for f in [
-            os.path.join(self.renderdir, 'feed', 'index.xml'),
-        ]:
-            if not os.path.exists(f):
-                ismissing = True
-            elif self.mtime > os.path.getmtime(f):
-                ismissing = True
-        if ismissing:
-            return False
+    def rssfeedfpath(self):
+        return os.path.join(
+            self.dpath,
+            'feed',
+            'index.xml'
+        )
+
+    @property
+    def atomfeedfpath(self):
+        return os.path.join(
+            self.dpath,
+            'feed',
+            'atom.xml'
+        )
+
+    def indexfpath(self, subpath=None):
+        if subpath:
+            return os.path.join(
+                self.dpath,
+                subpath,
+                'index.html'
+            )
         else:
-            return True
-
-    async def render_feeds(self):
-        await self.render_rss();
-        await self.render_atom();
-
-    async def render_rss(self):
-        await self.render_feed('rss')
-
-    async def render_atom(self):
-        await self.render_feed('atom')
+            return os.path.join(
+                self.dpath,
+                'index.html'
+            )
 
     async def render_feed(self, xmlformat):
-        settings.logger.info(
+        logger.info(
             'rendering category "%s" %s feed',
             self.name,
             xmlformat
@@ -1338,12 +1364,8 @@ class Category(dict):
             start = 0
             end = int(settings.site.get('pagination'))
 
-        dirname = os.path.join(self.renderdir, 'feed')
-        if not os.path.isdir(dirname):
-            os.makedirs(dirname)
-
         fg = FeedGenerator()
-        fg.id(self.feed)
+        fg.id(self.feedurl)
         fg.title(self.title)
         fg.author({
             'name': settings.author.get('name'),
@@ -1401,74 +1423,74 @@ class Category(dict):
             fe.summary(post.get('summary'))
 
         if xmlformat == 'rss':
-            fg.link(href=self.feed)
-            feedfile = os.path.join(dirname, 'index.xml')
+            fg.link(href=self.feedurl)
+            writepath(self.rssfeedfpath, '%s' % fg.rss_str(pretty=True))
         elif xmlformat == 'atom':
-            fg.link(href=self.feed, rel='self')
+            fg.link(href=self.feedurl, rel='self')
             fg.link(href=settings.meta.get('hub'), rel='hub')
-            feedfile = os.path.join(dirname, 'atom.xml')
-
-        with open(feedfile, 'wb') as f:
-            settings.logger.info('writing file: %s', feedfile)
-            if xmlformat == 'rss':
-                f.write(fg.rss_str(pretty=True))
-            elif xmlformat == 'atom':
-                f.write(fg.atom_str(pretty=True))
+            writepath(self.atomfeedfpath, '%s' % fg.atom_str(pretty=True))
 
     async def render_flat(self):
         r = J2.get_template(self.template).render(
-            self.tmplvars([self[k].tmplvars for k in self.sortedkeys])
+            self.tmplvars(self.get_posts())
+            #[self[k].tmplvars for k in self.sortedkeys]
         )
-        renderfile = os.path.join(self.renderdir, 'index.html')
-        with open(renderfile, 'wt') as f:
-            f.write(r)
-
-    #async def render_page(self, tmplvars):
-
-    # async def render_page(self, pagenum=1, pages=1):
-        # if self.display == 'flat':
-            # start = 0
-            # end = -1
-        # else:
-            # pagination = int(settings.site.get('pagination'))
-            # start = int((pagenum - 1) * pagination)
-            # end = int(start + pagination)
-
-        # posts = self.get_posts(start, end)
-        # r = J2.get_template(self.template).render({
-            # 'site': settings.site,
-            # 'author': settings.author,
-            # 'meta': settings.meta,
-            # 'licence': settings.licence,
-            # 'tips': settings.tips,
-            # 'category': self.tmplvars,
-            # 'pages': {
-                # 'current': pagenum,
-                # 'total': pages,
-            # },
-            # 'posts': posts,
-        # })
-        # if pagenum > 1:
-            # renderdir = os.path.join(self.renderdir, 'page', str(pagenum))
-        # else:
-            # renderdir = self.renderdir
-        # if not os.path.isdir(renderdir):
-            # os.makedirs(renderdir)
-        # renderfile = os.path.join(renderdir, 'index.html')
-        # with open(renderfile, 'wt') as f:
-            # f.write(r)
+        writepath(self.indexfpath(), r)
+
+    def is_uptodate(self, fpath, ts):
+        if not os.path.exists(fpath):
+            return False
+        if os.path.getmtime(fpath) >= ts:
+            return True
+        return False
+
+    def newest(self, start=0, end=-1):
+        if start == end:
+            end = -1
+        s = sorted(
+            [self[k].mtime for k in self.sortedkeys[start:end]],
+            reverse=True
+        )
+        return s[0]
 
     async def render(self):
-        if self.exists:
-            return
+        newest = self.newest()
+        if not self.is_uptodate(self.rssfeedfpath, newest):
+            logger.info(
+                '%s RSS feed outdated, generating new',
+                self.name
+            )
+            await self.render_feed('rss')
+        else:
+            logger.info(
+                '%s RSS feed up to date',
+                self.name
+            )
+
+        if not self.is_uptodate(self.atomfeedfpath, newest):
+            logger.info(
+                '%s ATOM feed outdated, generating new',
+                self.name
+            )
+            await self.render_feed('atom')
+        else:
+            logger.info(
+                '%s ATOM feed up to date',
+                self.name
+            )
 
-        await self.render_feeds()
         if self.display == 'flat':
-            await self.render_flat()
+            if not self.is_uptodate(self.indexfpath(), newest):
+                logger.info(
+                    '%s flat index outdated, generating new',
+                    self.name
+                )
+                await self.render_flat()
+            else:
+                logger.info(
+                    '%s flat index is up to date',
+                    self.name
+                )
             return
 
         by_time = {}
@@ -1482,21 +1504,34 @@ class Category(dict):
 
         keys = list(by_time.keys())
         for p, c, n in zip([None]+keys[:-1], keys, keys[1:]+[None]):
-            if arrow.utcnow().format(self.trange) == c.format(self.trange):
-                renderdir = self.renderdir
+            form = c.format(self.trange)
+            if arrow.utcnow().format(self.trange) == form:
+                fpath = self.indexfpath()
             else:
-                renderdir = os.path.join(
-                    self.renderdir,
-                    c.format(self.trange)
-                )
-            #
-            if not os.path.isdir(renderdir):
-                os.makedirs(renderdir)
-            renderfile = os.path.join(
-                renderdir,
-                'index.html'
-            )
-
+                fpath = self.indexfpath(form)
+
+            try:
+                findex = self.sortedkeys.index(by_time[c][0])
+                lindex = self.sortedkeys.index(by_time[c][-1])
+                newest = self.newest(findex, lindex)
+            except Exception as e:
+                #logger.info('newest called with start: %s, end: %s', start, end)
+                logger.error('calling newest failed with %s for %s', self.name, c)
+                continue
+
+            if self.is_uptodate(fpath, newest):
+                logger.info(
+                    '%s/%s index is up to date',
+                    self.name,
+                    form
+                )
+                continue
+            else:
+                logger.info(
+                    '%s/%s index is outdated, generating new',
+                    self.name,
+                    form
+                )
             r = J2.get_template(self.template).render(
                 self.tmplvars(
                     [self[k].tmplvars for k in by_time[c]],
@@ -1505,9 +1540,7 @@ class Category(dict):
                     n=n
                 )
             )
-            with open(renderfile, 'wt') as f:
-                settings.logger.info('writing category archive to: %s', renderfile)
-                f.write(r)
+            writepath(fpath, r)
 
 
 class Sitemap(dict):
     @property
@@ -1539,13 +1572,13 @@ def mkcomment(webmention):
 
     fdir = glob.glob(os.path.join(settings.paths.get('content'), '*', slug))
     if not len(fdir):
-        settings.logger.error(
+        logger.error(
             "couldn't find post for incoming webmention: %s",
             webmention
         )
         return
     elif len(fdir) > 1:
-        settings.logger.error(
+        logger.error(
            "multiple posts found for incoming webmention: %s",
            webmention
        )
@@ -1574,7 +1607,7 @@ def mkcomment(webmention):
     else:
         fm.content = c
     with open(fpath, 'wt') as f:
-        settings.logger.info("saving webmention to %s", fpath)
+        logger.info("saving webmention to %s", fpath)
         f.write(frontmatter.dumps(fm))
 
 
@@ -1596,7 +1629,7 @@ def makecomments():
     }
     wio_url = "https://webmention.io/api/mentions"
     webmentions = requests.get(wio_url, params=wio_params)
-    settings.logger.info("queried webmention.io with: %s", webmentions.url)
+    logger.info("queried webmention.io with: %s", webmentions.url)
     if webmentions.status_code != requests.codes.ok:
         return
     try:
@@ -1604,7 +1637,7 @@ def makecomments():
         for webmention in mentions.get('links'):
             mkcomment(webmention)
     except ValueError as e:
-        settings.logger.error('failed to query webmention.io: %s', e)
+        logger.error('failed to query webmention.io: %s', e)
         pass
 
 
@@ -1620,7 +1653,10 @@ def make():
     start = int(round(time.time() * 1000))
     last = 0
 
-    makecomments()
+    try:
+        makecomments()
+    except Exception as e:
+        logger.error('failed to make comments - are we offline?')
 
     content = settings.paths.get('content')
     worker = AsyncWorker()
@@ -1688,7 +1724,7 @@ def make():
         worker.add(category.render())
 
     worker.run()
-    settings.logger.info('worker finished')
+    logger.info('worker finished')
 
     # copy static
     staticfiles = []
@@ -1709,21 +1745,24 @@ def make():
             cp(e, t)
 
     end = int(round(time.time() * 1000))
-    settings.logger.info('process took %d ms' % (end - start))
+    logger.info('process took %d ms' % (end - start))
 
     if not settings.args.get('nosync'):
-        settings.logger.info('starting syncing')
+        logger.info('starting syncing')
         os.system(
             "rsync -avuhH --delete-after %s/ %s/" % (
                 settings.paths.get('build'),
                 settings.syncserver
             )
         )
-        settings.logger.info('syncing finished')
+        logger.info('syncing finished')
 
-    settings.logger.info('sending webmentions')
-    webmentions.run()
-    settings.logger.info('sending webmentions finished')
+    logger.info('sending webmentions')
+    try:
+        webmentions.run()
+    except Exception as e:
+        logger.error('failed to send webmentions - are we offline?')
+    logger.info('sending webmentions finished')
 
 
 if __name__ == '__main__':
@@ -118,8 +118,10 @@ args = vars(_parser.parse_args())
 
 loglevel = loglevels.get(args.get('loglevel'))
 
-logger = logging.getLogger("nasg")
+logger = logging.getLogger('NASG')
 logger.setLevel(loglevel)
 console_handler = logging.StreamHandler()
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+console_handler.setFormatter(formatter)
 logger.addHandler(console_handler)
 logging.getLogger('asyncio').setLevel(loglevel)
@@ -100,17 +100,32 @@
     <ul>
         {% if category.previous %}
        <li>
-            <a rel="prev" href="{{ category.previous.url }}">« {{ category.previous.label }}</a>
+            <a rel="prev" href="{{ category.previous.url }}">
+                <i>«</i>
+                <strong>{{ category.previous.label }}</strong>
+                {% if category.currentyear != category.previous.label %}
+                Jan - Dec
+                {% endif %}
+            </a>
        </li>
        {% endif %}
 
        <li>
-            <span class="current">{{ category.current }}</span>
+            <span class="current">
+                {{ category.current }}
+                {% if category.currentyear != category.current %}
+                Jan - Dec
+                {% endif %}
+            </span>
        </li>
 
        {% if category.next %}
        <li>
-            <a rel="next" href="{{ category.next.url }}">{{ category.next.label }} »</a>
+            <a rel="next" href="{{ category.next.url }}">
+                <strong>{{ category.next.label }}</strong>
+                Jan - Dec
+                <i>»</i>
+            </a>
        </li>
        {% endif %}
 
@@ -276,7 +276,10 @@ body > nav ul {
 
 body > nav li {
     margin: 1em 0.6em;
-    font-size: 1.6em;
+}
+
+body > nav i {
+    font-style:normal;
 }
 
 body > nav .current {