micropub queue support added; licence now links to spdx.org and uses the identifiers from there

parent 4cb7623355
commit 3bc95edebc

7 changed files with 388 additions and 181 deletions
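The licence settings move from bare Creative Commons slugs ('by', 'by-nc', 'by-nc-nd') to full SPDX identifiers, and the templates now build their rel="license" links straight from those identifiers. A minimal sketch of that mapping, with an illustrative helper name that is not part of the commit itself:

# hypothetical helper; the templates do the equivalent inline in Jinja
def licence_link(spdx_id):
    # e.g. 'CC-BY-NC-ND-4.0' -> 'https://spdx.org/licenses/CC-BY-NC-ND-4.0.html'
    return 'https://spdx.org/licenses/%s.html' % spdx_id

print(licence_link('CC-BY-4.0'))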
.gitignore (vendored), 2 changed lines

@@ -2,4 +2,4 @@ __pycache__
 _scratch
 keys.py
 .idea
-.venv
+lib
nasg.py, 388 changed lines

@@ -67,28 +67,41 @@ RE_PRECODE = re.compile(
     r'<pre class="([^"]+)"><code>'
 )

+
+def url2slug(url, limit=200):
+    return slugify(
+        re.sub(r"^https?://(?:www)?", "", url),
+        only_ascii=True,
+        lower=True
+    )[:limit]
+

 def writepath(fpath, content, mtime=0):
     d = os.path.dirname(fpath)
     if not os.path.isdir(d):
         logger.debug('creating directory tree %s', d)
         os.makedirs(d)
-    with open(fpath, 'wt') as f:
+    if isinstance(content, str):
+        mode = 'wt'
+    else:
+        mode = 'wb'
+
+    with open(fpath, mode) as f:
         logger.info('writing file %s', fpath)
         f.write(content)
     # TODO
-    #if (mtime > 0):
+    # if (mtime > 0):


-#def relurl(url,base=settings.site.get('url')):
+# def relurl(url,base=settings.site.get('url')):
     #url =urlparse(url)
     #base = urlparse(base)

     #if base.netloc != url.netloc:
         #raise ValueError('target and base netlocs do not match')

     #base_dir='.%s' % (os.path.dirname(base.path))
     #url = '.%s' % (url.path)
-    #return os.path.relpath(url,start=base_dir)
+    # return os.path.relpath(url,start=base_dir)


 class cached_property(object):
     """ extermely simple cached_property decorator:
@@ -96,6 +109,7 @@ class cached_property(object):
     result is calculated, then the class method is overwritten to be
     a property, contaning the result from the method
     """
+
     def __init__(self, method, name=None):
         self.method = method
         self.name = name or method.__name__
@@ -107,6 +121,7 @@ class cached_property(object):
         setattr(inst, self.name, result)
         return result

+
 class Webmention(object):
     def __init__(self, source, target, stime):
         self.source = source
@@ -132,10 +147,10 @@ class Webmention(object):
         else:
             return False

-    async def save(self, content):
+    def save(self, content):
         writepath(self.fpath, content)

-    async def send(self):
+    def send(self):
         if self.exists:
             return
         telegraph_url = 'https://telegraph.p3k.io/webmention'
@@ -153,7 +168,7 @@ class Webmention(object):
         if r.status_code not in [200, 201, 202]:
             logger.error('sending failed: %s %s', r.status_code, r.text)
         else:
-            await self.save(r.text)
+            self.save(r.text)


 class MarkdownDoc(object):
@@ -174,7 +189,8 @@ class MarkdownDoc(object):

     def __pandoc(self, c):
         c = Pandoc(c)
-        c = RE_PRECODE.sub('<pre><code lang="\g<1>" class="language-\g<1>">', c)
+        c = RE_PRECODE.sub(
+            '<pre><code lang="\g<1>" class="language-\g<1>">', c)
         return c

     @cached_property
@@ -193,6 +209,7 @@ class MarkdownDoc(object):
         c = c.replace(match, '')
         return self.__pandoc(c)

+
 class Comment(MarkdownDoc):
     def __init__(self, fpath):
         self.fpath = fpath
@@ -614,16 +631,8 @@ class Singular(MarkdownDoc):
             self.content,
         ])

-    #async def update(self):
-        #fm = frontmatter.loads('')
-        #fm.metadata = self.meta
-        #fm.content = self.content
-        #with open(fpath, 'wt') as f:
-            #logger.info("updating %s", fpath)
-            #f.write(frontmatter.dumps(fm))
-
     async def copyfiles(self):
-        exclude=['.md', '.jpg', '.png', '.gif'];
+        exclude = ['.md', '.jpg', '.png', '.gif']
         files = glob.glob(os.path.join(
             os.path.dirname(self.fpath),
             '*.*'
@@ -638,7 +647,8 @@ class Singular(MarkdownDoc):
                 self.name,
                 os.path.basename(f)
             )
-            if os.path.exists(t) and os.path.getmtime(f) <= os.path.getmtime(t):
+            if os.path.exists(t) and os.path.getmtime(
+                    f) <= os.path.getmtime(t):
                 continue
             logger.info("copying '%s' to '%s'", f, t)
             cp(f, t)
@@ -999,12 +1009,13 @@ class AsyncWorker(object):
         self._loop = asyncio.get_event_loop()

     def add(self, job):
-        task = self._loop.create_task(job)
-        self._tasks.append(task)
+        #task = self._loop.create_task(job)
+        self._tasks.append(asyncio.ensure_future(job))

     def run(self):
         self._loop.run_until_complete(asyncio.wait(self._tasks))


 class PHPFile(object):
     @property
     def exists(self):
@@ -1066,8 +1077,10 @@ class Search(PHPFile):
                 tokenize=porter
             )'''
         )
+        self.is_changed = False

     def __exit__(self):
+        if self.is_changed:
         self.db.commit()
         self.db.execute('PRAGMA auto_vacuum;')
         self.db.close()
@@ -1112,6 +1125,7 @@ class Search(PHPFile):
             category,
             content
         ))
+        self.is_changed = True

     @property
     def renderfile(self):
@@ -1170,9 +1184,7 @@ class IndexPHP(PHPFile):
             'gones': self.gone,
             'redirects': self.redirect
         })
-        with open(self.renderfile, 'wt') as f:
-            logger.info("rendering to %s", self.renderfile)
-            f.write(r)
+        writepath(self.renderfile, r)


 class WebhookPHP(PHPFile):
@@ -1193,9 +1205,27 @@ class WebhookPHP(PHPFile):
             'webmentionio': keys.webmentionio,
             'zapier': keys.zapier,
         })
-        with open(self.renderfile, 'wt') as f:
-            logger.info("rendering to %s", self.renderfile)
-            f.write(r)
+        writepath(self.renderfile, r)
+
+
+class MicropubPHP(PHPFile):
+    @property
+    def renderfile(self):
+        return os.path.join(
+            settings.paths.get('build'),
+            'micropub.php'
+        )
+
+    @property
+    def templatefile(self):
+        return 'Micropub.j2.php'
+
+    async def _render(self):
+        r = J2.get_template(self.templatefile).render({
+            'site': settings.site,
+            'paths': settings.paths
+        })
+        writepath(self.renderfile, r)


 class Category(dict):
@@ -1215,12 +1245,6 @@ class Category(dict):
         )
         dict.__setitem__(self, key, value)

-    def get_posts(self, start=0, end=-1):
-        return [
-            self[k].tmplvars
-            for k in self.sortedkeys[start:end]
-        ]
-
     @property
     def sortedkeys(self):
         return list(sorted(self.keys(), reverse=True))
@@ -1263,6 +1287,50 @@ class Category(dict):
         else:
             return settings.paths.get('build')

+    @property
+    def mtime(self):
+        return arrow.get(self[self.sortedkeys[0]].published).timestamp
+
+    @property
+    def rssfeedfpath(self):
+        return os.path.join(
+            self.dpath,
+            'feed',
+            'index.xml'
+        )
+
+    @property
+    def atomfeedfpath(self):
+        return os.path.join(
+            self.dpath,
+            'feed',
+            'atom.xml'
+        )
+
+    def get_posts(self, start=0, end=-1):
+        return [
+            self[k].tmplvars
+            for k in self.sortedkeys[start:end]
+        ]
+
+    def is_uptodate(self, fpath, ts):
+        if settings.args.get('force'):
+            return False
+        if not os.path.exists(fpath):
+            return False
+        if os.path.getmtime(fpath) >= ts:
+            return True
+        return False
+
+    def newest(self, start=0, end=-1):
+        if start == end:
+            end = -1
+        s = sorted(
+            [self[k].mtime for k in self.sortedkeys[start:end]],
+            reverse=True
+        )
+        return s[0]
+
     def navlink(self, ts):
         label = ts.format(self.trange)
         if arrow.utcnow().format(self.trange) == label:
@@ -1305,43 +1373,6 @@ class Category(dict):
             'posts': posts,
         }

-    @property
-    def mtime(self):
-        return arrow.get(self[self.sortedkeys[0]].published).timestamp
-
-    # @property
-    # def exists(self):
-        # if settings.args.get('force'):
-            # return False
-        # ismissing = False
-        # for f in [
-            # os.path.join(self.renderdir, 'feed', 'index.xml'),
-        # ]:
-            # if not os.path.exists(f):
-                # ismissing = True
-            # elif self.mtime > os.path.getmtime(f):
-                # ismissing = True
-        # if ismissing:
-            # return False
-        # else:
-            # return True
-
-    @property
-    def rssfeedfpath(self):
-        return os.path.join(
-            self.dpath,
-            'feed',
-            'index.xml'
-        )
-
-    @property
-    def atomfeedfpath(self):
-        return os.path.join(
-            self.dpath,
-            'feed',
-            'atom.xml'
-        )
-
     def indexfpath(self, subpath=None):
         if subpath:
             return os.path.join(
@@ -1385,7 +1416,7 @@ class Category(dict):

             fe.author({
                 'name': settings.author.get('name'),
-                'email':settings.author.get('email')
+                'email': settings.author.get('email')
             })

             fe.category({
@@ -1418,40 +1449,79 @@ class Category(dict):
                     enc.get('mime')
                 )
             elif xmlformat == 'atom':
-                fe.link(href=post.get('url'), rel='alternate', type='text/html')
+                fe.link(
+                    href=post.get('url'),
+                    rel='alternate',
+                    type='text/html')
                 fe.content(src=post.get('url'), type='text/html')
                 fe.summary(post.get('summary'))

         if xmlformat == 'rss':
             fg.link(href=self.feedurl)
-            writepath(self.rssfeedfpath, '%s' % fg.rss_str(pretty=True))
+            writepath(self.rssfeedfpath, fg.rss_str(pretty=True))
         elif xmlformat == 'atom':
             fg.link(href=self.feedurl, rel='self')
             fg.link(href=settings.meta.get('hub'), rel='hub')
-            writepath(self.atomfeedfpath, '%s' % fg.atom_str(pretty=True))
+            writepath(self.atomfeedfpath, fg.atom_str(pretty=True))

     async def render_flat(self):
         r = J2.get_template(self.template).render(
             self.tmplvars(self.get_posts())
-            #[self[k].tmplvars for k in self.sortedkeys]
         )
         writepath(self.indexfpath(), r)

-    def is_uptodate(self, fpath, ts):
-        if not os.path.exists(fpath):
-            return False
-        if os.path.getmtime(fpath) >= ts:
-            return True
-        return False
-
-    def newest(self, start=0, end=-1):
-        if start == end:
-            end = -1
-        s = sorted(
-            [self[k].mtime for k in self.sortedkeys[start:end]],
-            reverse=True
-        )
-        return s[0]
+    async def render_archives(self):
+        by_time = {}
+        for key in self.sortedkeys:
+            trange = arrow.get(key).format(self.trange)
+            if trange not in by_time:
+                by_time.update({
+                    trange: []
+                })
+            by_time[trange].append(key)
+
+        keys = list(by_time.keys())
+        for p, c, n in zip([None] + keys[:-1], keys, keys[1:] + [None]):
+            form = c.format(self.trange)
+            if arrow.utcnow().format(self.trange) == form:
+                fpath = self.indexfpath()
+            else:
+                fpath = self.indexfpath(form)
+
+            try:
+                findex = self.sortedkeys.index(by_time[c][0])
+                lindex = self.sortedkeys.index(by_time[c][-1])
+                newest = self.newest(findex, lindex)
+            except Exception as e:
+                logger.error(
+                    'calling newest failed with %s for %s',
+                    self.name,
+                    c
+                )
+                continue
+
+            if self.is_uptodate(fpath, newest):
+                logger.info(
+                    '%s/%s index is up to date',
+                    self.name,
+                    form
+                )
+                continue
+            else:
+                logger.info(
+                    '%s/%s index is outdated, generating new',
+                    self.name,
+                    form
+                )
+                r = J2.get_template(self.template).render(
+                    self.tmplvars(
+                        [self[k].tmplvars for k in by_time[c]],
+                        c=c,
+                        p=p,
+                        n=n
+                    )
+                )
+                writepath(fpath, r)

     async def render(self):
         newest = self.newest()
@@ -1492,55 +1562,9 @@ class Category(dict):
                 self.name
             )
             return

-        by_time = {}
-        for key in self.sortedkeys:
-            trange = arrow.get(key).format(self.trange)
-            if trange not in by_time:
-                by_time.update({
-                    trange: []
-                })
-            by_time[trange].append(key)
-
-        keys = list(by_time.keys())
-        for p, c, n in zip([None]+keys[:-1], keys, keys[1:]+[None]):
-            form = c.format(self.trange)
-            if arrow.utcnow().format(self.trange) == form:
-                fpath = self.indexfpath()
-            else:
-                fpath = self.indexfpath(form)
-
-            try:
-                findex = self.sortedkeys.index(by_time[c][0])
-                lindex = self.sortedkeys.index(by_time[c][-1])
-                newest = self.newest(findex, lindex)
-            except Exception as e:
-                #logger.info('newest called with start: %s, end: %s', start, end)
-                logger.error('calling newest failed with %s for %s', self.name, c)
-                continue
-
-            if self.is_uptodate(fpath, newest):
-                logger.info(
-                    '%s/%s index is up to date',
-                    self.name,
-                    form
-                )
-                continue
-            else:
-                logger.info(
-                    '%s/%s index is outdated, generating new',
-                    self.name,
-                    form
-                )
-                r = J2.get_template(self.template).render(
-                    self.tmplvars(
-                        [self[k].tmplvars for k in by_time[c]],
-                        c=c,
-                        p=p,
-                        n=n
-                    )
-                )
-                writepath(fpath, r)
+        else:
+            await self.render_archives()


 class Sitemap(dict):
     @property
@@ -1560,7 +1584,8 @@ class Sitemap(dict):
         with open(self.renderfile, 'wt') as f:
             f.write("\n".join(sorted(self.keys())))

-def mkcomment(webmention):
+
+def makecomment(webmention):
     if 'published_ts' in webmention.get('data'):
         maybe = webmention.get('data').get('published')
         if not maybe or maybe == 'None':
@@ -1569,6 +1594,8 @@ def mkcomment(webmention):
     dt = arrow.get(webmention.get('data').get('published'))

     slug = webmention.get('target').strip('/').split('/')[-1]
+    if slug == settings.site.get('domain'):
+        return

     fdir = glob.glob(os.path.join(settings.paths.get('content'), '*', slug))
     if not len(fdir):
@@ -1606,9 +1633,7 @@ def mkcomment(webmention):
         fm.content = ''
     else:
         fm.content = c
-    with open(fpath, 'wt') as f:
-        logger.info("saving webmention to %s", fpath)
-        f.write(frontmatter.dumps(fm))
+    writepath(fpath, frontmatter.dumps(fm))


 def makecomments():
@@ -1635,19 +1660,69 @@ def makecomments():
     try:
         mentions = webmentions.json()
         for webmention in mentions.get('links'):
-            mkcomment(webmention)
+            makecomment(webmention)
     except ValueError as e:
         logger.error('failed to query webmention.io: %s', e)
         pass

-def url2slug(url, limit=200):
-    return slugify(
-        re.sub(r"^https?://(?:www)?", "", url),
-        only_ascii=True,
-        lower=True
-    )[:limit]
+
+def makepost(fpath):
+    try:
+        fname = os.path.basename(fpath)
+        mtime = arrow.get(fname.split('.')[0])
+        with open(fpath, 'r') as f:
+            payload = json.loads(f.read())
+        pprint(payload)
+        if 'content' not in payload:
+            logger.error('missing content from %s', fname)
+            return False
+
+        fm = frontmatter.loads('')
+        fm.metadata = {
+            'published': mtime.format(
+                settings.dateformat.get('iso')
+            ),
+            'tags': payload.get('category', [])
+        }
+        fm.content = payload.get('content')
+
+        for maybe in ['title', 'summary', 'in-reply-to']:
+            x = payload.get(maybe, None)
+            if x:
+                fm.metadata.update({maybe: x})
+
+        slug = payload.get('slug', '')
+        if not len(slug):
+            if 'in-reply-to' in fm.metadata:
+                slug = "re-%s" % (url2slug(fm.metadata.get('in-reply-to')))
+            else:
+                slug = mtime.format(settings.dateformat.get('fname'))
+
+        fpath = os.path.join(
+            settings.paths.get('micropub'),
+            slug,
+            'index.md'
+        )
+        writepath(fpath, frontmatter.dumps(fm))
+        return True
+    except Exception as e:
+        logger.error('parsing entry at %s failed: %s', fpath, e)
+        return False
+
+
+def makeposts():
+    logger.info('getting micropub queue...')
+    os.system(
+        "rsync -avuhH --remove-source-files %s/ %s/" % (
+            '%s/%s' % (settings.syncserver, settings.paths.get('remotequeue')),
+            '%s' % (settings.paths.get('queue'))
+        )
+    )
+    logger.info('...done')
+
+    for js in glob.glob(os.path.join(settings.paths.get('queue'), '*.json')):
+        logging.info('processing micropub post %s', js)
+        if makepost(js):
+            os.unlink(js)


 def make():
     start = int(round(time.time() * 1000))
@@ -1658,11 +1733,16 @@ def make():
     except Exception as e:
         logger.error('failed to make comments - are we offline?')

+    makeposts();
+
     content = settings.paths.get('content')
     worker = AsyncWorker()
-    webmentions = AsyncWorker()
+    webmentions = []
     rules = IndexPHP()

+    micropub = MicropubPHP()
+    worker.add(micropub.render())
+
     webhook = WebhookPHP()
     worker.add(webhook.render())
@@ -1676,7 +1756,7 @@ def make():
         for i in post.images.values():
             worker.add(i.downsize())
         for i in post.to_ping:
-            webmentions.add(i.send())
+            webmentions.append(i)

         worker.add(post.render())
         worker.add(post.copyfiles())
@@ -1706,8 +1786,7 @@ def make():
     worker.add(search.render())
     worker.add(sitemap.render())

-    for e in glob.glob(os.path.join(content, '*', '*.ptr')):
+    for e in glob.glob(os.path.join(content, '*', '*.del')):
         post = Gone(e)
         if post.mtime > last:
             last = post.mtime
@@ -1719,7 +1798,6 @@ def make():
             rules.add_redirect(post.source, post.target)
     worker.add(rules.render())

-
     for category in categories.values():
         worker.add(category.render())
@@ -1730,8 +1808,6 @@ def make():
     staticfiles = []
     staticpaths = [
         os.path.join(content, '*.*'),
-        #os.path.join(settings.paths.get('tmpl'), '*.js'),
-        #os.path.join(settings.paths.get('tmpl'), '*.css')
     ]
     for p in staticpaths:
         staticfiles = staticfiles + glob.glob(p)
@@ -1752,14 +1828,16 @@ def make():
     os.system(
         "rsync -avuhH --delete-after %s/ %s/" % (
             settings.paths.get('build'),
-            settings.syncserver
+            '%s/%s' % (settings.syncserver,
+                       settings.paths.get('remotewww'))
         )
     )
     logger.info('syncing finished')

     logger.info('sending webmentions')
     try:
-        webmentions.run()
+        for i in webmentions:
+            i.send()
     except Exception as e:
         logger.error('failed to send webmentions - are we offline?')
     logger.info('sending webmentions finished')
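The nasg.py side of the micropub queue works as shown above: makeposts() rsyncs *.json files from the remote queue, and makepost() turns each payload into a frontmatter markdown post under the configured 'micropub' content path. A rough, self-contained sketch of that conversion with a made-up payload (the sample data and timestamp handling are illustrative only):

import arrow
import frontmatter

payload = {  # roughly what micropub.php drops into the queue
    'content': 'Hello from the queue',
    'category': ['note'],
}

fm = frontmatter.loads('')
fm.metadata = {
    'published': arrow.utcnow().format('YYYY-MM-DDTHH:mm:ssZZ'),
    'tags': payload.get('category', []),
}
fm.content = payload.get('content')

# this string is what ends up in <micropub path>/<slug>/index.md
print(frontmatter.dumps(fm))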
settings.py, 15 changed lines

@@ -4,7 +4,7 @@ import argparse
 import logging

 base = os.path.abspath(os.path.expanduser('~/Projects/petermolnar.net'))
-syncserver = 'liveserver:/web/petermolnar.net/web'
+syncserver = 'liveserver:/web/petermolnar.net'

 site = {
     'title': 'Peter Molnar',
@@ -16,7 +16,7 @@ site = {
         'photo',
         'journal'
     ],
-    'licence': 'by-nc-nd',
+    'licence': 'CC-BY-NC-ND-4.0',
 }

 categorydisplay = {
@@ -26,8 +26,8 @@ categorydisplay = {
 }

 licence = {
-    'article': 'by',
-    'journal': 'by-nc',
+    'article': 'CC-BY-4.0',
+    'journal': 'CC-BY-NC-4.0',
 }

 meta = {
@@ -36,6 +36,8 @@ meta = {
     'hub': 'https://petermolnar.superfeedr.com/',
     'authorization_endpoint': 'https://indieauth.com/auth',
     'token_endpoint': 'https://tokens.indieauth.com/token',
+    'micropub': 'https://petermolnar.net/micropub.php',
+    'microsub': 'https://aperture.p3k.io/microsub/83'
 }

 author = {
@@ -62,6 +64,10 @@ paths = {
     'tmpl': os.path.join(base, 'nasg', 'templates'),
     'watermark': os.path.join(base, 'nasg', 'templates', 'watermark.png'),
     'build': os.path.join(base, 'www'),
+    'queue': os.path.join(base, 'queue'),
+    'remotewww': 'web',
+    'remotequeue': 'queue',
+    'micropub': os.path.join(base, 'content', 'note'),
 }

 photo = {
@@ -83,6 +89,7 @@ tips = {
 dateformat = {
     'iso': 'YYYY-MM-DDTHH:mm:ssZZ',
     'display': 'YYYY-MM-DD HH:mm',
+    'fname': 'YYYYMMDDHHmmssZ',
 }

 loglevels = {
@@ -1,5 +1,10 @@
 {% extends "base.j2.html" %}
 {% block lang %}{% endblock %}

+{% block licence %}
+<link rel="license" href="https://spdx.org/licenses/{{ site.licence }}.html" type="{{ site.licence }}" />
+{% endblock %}
+
 {% block title %}{{ category.title }}{% endblock %}
 {% block meta %}
 <link rel="alternate" type="application/rss+xml" title="{{ category.title }} RSS feed" href="{{ category.feed }}" />
templates/Micropub.j2.php, 114 changed lines (new file)

@@ -0,0 +1,114 @@
+<?php
+
+function _syslog($msg) {
+    $trace = debug_backtrace();
+    $caller = $trace[1];
+    $parent = $caller['function'];
+    if (isset($caller['class']))
+        $parent = $caller['class'] . '::' . $parent;
+
+    return error_log( "{$parent}: {$msg}" );
+}
+
+function unauthorized($text) {
+    header('HTTP/1.1 401 Unauthorized');
+    die($text);
+}
+
+function badrequest($text) {
+    header('HTTP/1.1 400 Bad Request');
+    die($text);
+}
+
+function httpok($text) {
+    header('HTTP/1.1 200 OK');
+    echo($text);
+    exit(0);
+}
+
+function accepted() {
+    header('HTTP/1.1 202 Accepted');
+    #header('Location: {{ site.url }}');
+    exit(0);
+}
+
+if (!empty($_GET)) {
+    if ( ! isset($_GET['q']) ) {
+        badrequest('please POST a micropub request');
+    }
+
+    if ( isset($_GET['q']['config']) ) {
+        httpok(json_encode(array('tags' => array())));
+    }
+
+    if(isset($_GET['q']['syndicate-to'])) {
+        httpok(json_encode(array('syndicate-to' => array())));
+    }
+
+    badrequest('please POST a micropub request');
+}
+
+$raw = file_get_contents("php://input");
+print_r($raw);
+try {
+    $decoded = json_decode($raw, true);
+} catch (Exception $e) {
+    _syslog('failed to decode JSON, trying decoding form data');
+    try {
+        parse_str($raw, $decoded);
+    }
+    catch (Exception $e) {
+        _syslog('failed to decoding form data as well');
+        badrequest('invalid POST contents');
+    }
+}
+print_r($decoded);
+
+$token = '';
+if ( isset($decoded['access_token']) ) {
+    $token = $decoded['access_token'];
+    unset($decoded['access_token']);
+}
+elseif ( isset($_SERVER['HTTP_AUTHORIZATION']) ) {
+    $token = trim(str_replace('Bearer', '', $_SERVER['HTTP_AUTHORIZATION']));
+}
+
+if (empty($token)) {
+    unauthorized('missing token');
+}
+
+$request = curl_init();
+curl_setopt($request, CURLOPT_URL, 'https://tokens.indieauth.com/token');
+curl_setopt($request, CURLOPT_HTTPHEADER, array(
+    'Content-Type: application/x-www-form-urlencoded',
+    sprintf('Authorization: Bearer %s', $token)
+));
+curl_setopt($request, CURLOPT_RETURNTRANSFER, 1);
+$response = curl_exec($request);
+curl_close($request);
+parse_str(urldecode($response), $verification);
+if (! isset($verification['scope']) ) {
+    unauthorized('missing "scope"');
+}
+if (! isset($verification['me']) ) {
+    unauthorized('missing "me"');
+}
+if ( ! stristr($verification['me'], '{{ site.url }}') ) {
+    unauthorized('wrong domain');
+}
+if ( ! stristr($verification['scope'], 'create') ) {
+    unauthorized('invalid scope');
+}
+
+$user = posix_getpwuid(posix_getuid());
+$now = time();
+$decoded['mtime'] = $now;
+$fname = sprintf(
+    '%s/%s/%s.json',
+    $user['dir'],
+    '{{ paths.remotequeue }}',
+    microtime(true)
+);
+
+file_put_contents($fname, json_encode($decoded, JSON_PRETTY_PRINT));
+accepted();
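The rendered micropub.php accepts a JSON or form-encoded POST, verifies the IndieAuth bearer token against tokens.indieauth.com for a 'create' scope on the site's domain, and writes the decoded payload as a timestamped JSON file into the remote queue for the generator to pick up. A hedged sketch of what a client request could look like, with a placeholder token:

import requests

resp = requests.post(
    'https://petermolnar.net/micropub.php',  # endpoint advertised in settings.meta
    headers={'Authorization': 'Bearer <access-token>'},  # placeholder token
    json={
        'content': 'Hello from a micropub client',
        'category': ['note'],
    },
)
print(resp.status_code)  # the endpoint answers 202 Accepted on success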
@@ -3,7 +3,6 @@
 <meta name="author" content="{{ author.name }} <{{ author.email }}>" />
 <meta name="description" content="{{ post.summary|e }}" />
 <link rel="canonical" href="{{ post.url }}" />
-<link rel="license" href="https://creativecommons.org/licenses/4.0/{{ post.licence }}" />
 {% if post.has_code %}
 <style media="all">
 {% include 'prism.css' %}
@@ -17,3 +16,6 @@
 </script>
 {% endif %}
 {% endblock %}
+{% block licence %}
+<link rel="license" href="https://spdx.org/licenses/{{ post.licence }}.html" type="{{ post.licence }}" />
+{% endblock %}
@@ -5,6 +5,7 @@
 <meta charset="UTF-8" />
 <meta name="viewport" content="width=device-width,initial-scale=1,minimum-scale=1" />
 <link rel="icon" href="{{ site.url }}/favicon.ico" />
+{% block licence %}{% endblock %}
 {% for key, value in meta.items() %}
 <link rel="{{ key }}" href="{{ value }}" />
 {% endfor %}
@@ -12,7 +13,7 @@
 <style media="all">
 {% include 'style.css' %}
 </style>
-<style id="css_alt" media="aural">
+<style id="css_alt" media="speech">
 {% include 'style-alt.css' %}
 </style>
 <style media="print">
@@ -31,7 +32,7 @@
     var setto = 'all';
     var e = document.querySelector('#css_alt');
     if (e.getAttribute("media") == 'all') {
-        setto = 'aural';
+        setto = 'speech';
     }
     localStorage.setItem("stylesheet", setto);
     e.setAttribute("media", setto);
@@ -231,8 +232,8 @@

 <dt>License</dt>
 <dd class="license">
-{% if post.licence == 'by' %}
-<a rel="license" href="https://creativecommons.org/licenses/by/4.0/" class="u-license" property="u-licence" itemprop="license">CC BY 4.0</a>
+{% if post.licence == 'CC-BY-4.0' %}
+<a rel="license" href="https://creativecommons.org/licenses/by/4.0/" class="u-license" property="u-licence" itemprop="license">{{ post.licence }}</a>
 <ul>
 <li>you can share it</li>
 <li>you can republish it</li>
@@ -240,8 +241,8 @@
 <li>you can use it for commercial purposes</li>
 <li>you always need to make a link back here</li>
 </ul>
-{% elif post.licence.text == 'by-nc' %}
-<a rel="license" href="https://creativecommons.org/licenses/by-nc/4.0/" class="u-license" property="u-licence" itemprop="license">CC BY-NC 4.0</a>
+{% elif post.licence == 'CC-BY-NC-4.0' %}
+<a rel="license" href="https://creativecommons.org/licenses/by-nc/4.0/" class="u-license" property="u-licence" itemprop="license">{{ post.licence }}</a>
 <ul>
 <li>you can share it</li>
 <li>you can republish it</li>
@@ -250,8 +251,8 @@
 <li>you always need to make a link back here</li>
 </ul>
 For commercial use, please contact me.
-{% else %}
-<a rel="license" href="https://creativecommons.org/licenses/by-nc-nd/4.0/" class="u-license" property="u-licence" itemprop="license">CC BY-NC-ND 4.0</a>
+{% elif post.licence == 'CC-BY-NC-ND-4.0' %}
+<a rel="license" href="https://creativecommons.org/licenses/by-nc-nd/4.0/" class="u-license" property="u-licence" itemprop="license">{{ post.licence }}</a>
 <ul>
 <li>you can share it</li>
 <li>you can't modify it</li>