cleaned up syndication copy fetcher

Peter Molnar 2019-05-22 20:54:10 +01:00
parent f1ef48b903
commit 5693585695

nasg.py | 24

@@ -229,15 +229,9 @@ class Webmention(object):
             )
         )
 
-    @property
-    def syndication_fpath(self):
-        return self.fpath.replace('.ping', '.copy')
 
     def check_syndication(self):
         """ this is very specific to webmention.io and brid.gy publish """
-        if os.path.isfile(self.syndication_fpath):
-            logger.debug("syndication copy exist for %s", self.dpath)
-            return
         if "fed.brid.gy" in self.target:
             return
         if "brid.gy" not in self.target:
@@ -247,31 +241,31 @@ class Webmention(object):
         with open(self.fpath) as f:
             txt = f.read()
         if "telegraph.p3k.io" not in txt:
             return
         try:
             maybe = json.loads(txt)
             if "location" not in maybe:
                 return
             if "http_body" not in maybe:
+                logger.debug("trying to re-fetch %s for %s", maybe["location"], self.fpath)
                 wio = requests.get(maybe["location"])
                 if wio.status_code != requests.codes.ok:
                     return
                 maybe = wio.json()
+                logger.debug("response: %s", maybe)
                 with open(self.fpath, "wt") as update:
                     update.write(json.dumps(maybe, sort_keys=True, indent=4))
             if "url" in maybe["http_body"]:
                 data = json.loads(maybe["http_body"])
                 url = data["url"]
-                sp = os.path.join(
-                    self.dpath, "%s.copy" % url2slug(url, 200)
-                )
+                sp = os.path.join(self.dpath, "%s.copy" % url2slug(url, 200))
+                if os.path.exists(sp):
+                    return
                 with open(sp, "wt") as f:
-                    logger.info(
-                        "writing syndication copy URL %s to %s",
-                        url,
-                        sp
-                    )
+                    logger.info("writing syndication copy %s to %s", url, sp)
                     f.write(url)
         except Exception as e:
             logger.error(
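
After this change the syndication copy is keyed by the syndicated URL itself: the method slugs the URL, writes it into a "<slug>.copy" file next to the entry, and skips the write when that file already exists, instead of deriving a single copy path from the ".ping" file as the removed syndication_fpath property did. A minimal standalone sketch of that idea, assuming a hypothetical url2slug stand-in (the real helper lives elsewhere in nasg.py and may slug differently):

    import os
    import re

    def url2slug(url, limit=200):
        # hypothetical stand-in for nasg.py's url2slug helper;
        # strips the scheme and keeps only filename-safe characters
        slug = re.sub(r"[^0-9a-zA-Z_-]+", "-", re.sub(r"^https?://", "", url))
        return slug.strip("-")[:limit]

    def save_syndication_copy(dpath, url):
        # write the syndicated URL into <slug>.copy unless a copy is already there
        sp = os.path.join(dpath, "%s.copy" % url2slug(url, 200))
        if os.path.exists(sp):
            return sp
        with open(sp, "wt") as f:
            f.write(url)
        return sp

    # example (hypothetical paths/URLs):
    # save_syndication_copy("/srv/site/some-entry", "https://twitter.com/example/status/123")

Because the copy file is named after the syndicated URL rather than after the ping file, one entry appears able to record several syndication targets side by side, and re-running the fetcher is idempotent.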