cleaned up syndication copy fetcher

parent f1ef48b903
commit 5693585695

1 changed file with 32 additions and 38 deletions

nasg.py (70 changed lines: 32 additions, 38 deletions)
@@ -229,15 +229,9 @@ class Webmention(object):
             )
         )

-    @property
-    def syndication_fpath(self):
-        return self.fpath.replace('.ping', '.copy')

     def check_syndication(self):
         """ this is very specific to webmention.io and brid.gy publish """
-        if os.path.isfile(self.syndication_fpath):
-            logger.debug("syndication copy exist for %s", self.dpath)
-            return
         if "fed.brid.gy" in self.target:
             return
         if "brid.gy" not in self.target:
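The dropped syndication_fpath property paired each webmention ping file with a single ".copy" sibling by plain filename substitution. A minimal sketch of that old one-copy-per-ping scheme, with hypothetical paths (real fpath values are not shown in this diff):

    fpath = "content/example-entry/brid-gy-publish-twitter.ping"  # hypothetical ping file
    copy_fpath = fpath.replace('.ping', '.copy')                   # the removed property's naming
    # -> "content/example-entry/brid-gy-publish-twitter.copy"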
@@ -247,39 +241,39 @@ class Webmention(object):

         with open(self.fpath) as f:
             txt = f.read()
-        if "telegraph.p3k.io" not in txt:
-            return
-        try:
-            maybe = json.loads(txt)
-            if "location" not in maybe:
-                return
-            if "http_body" not in maybe:
-                wio = requests.get(maybe["location"])
-                if wio.status_code != requests.codes.ok:
-                    return
-                maybe = wio.json()
-                with open(self.fpath, "wt") as update:
-                    update.write(json.dumps(maybe, sort_keys=True, indent=4))
-            if "url" in maybe["http_body"]:
-                data = json.loads(maybe["http_body"])
-                url = data["url"]
-                sp = os.path.join(
-                    self.dpath, "%s.copy" % url2slug(url, 200)
-                )
-                with open(sp, "wt") as f:
-                    logger.info(
-                        "writing syndication copy URL %s to %s",
-                        url,
-                        sp
-                    )
-                    f.write(url)
-        except Exception as e:
-            logger.error(
-                "failed to fetch syndication URL for %s: %s",
-                self.dpath,
-                e
-            )
-            pass
+        if "telegraph.p3k.io" not in txt:
+            return
+
+        try:
+            maybe = json.loads(txt)
+            if "location" not in maybe:
+                return
+            if "http_body" not in maybe:
+                logger.debug("trying to re-fetch %s for %s", maybe["location"], self.fpath)
+                wio = requests.get(maybe["location"])
+                if wio.status_code != requests.codes.ok:
+                    return
+                maybe = wio.json()
+                logger.debug("response: %s", maybe)
+                with open(self.fpath, "wt") as update:
+                    update.write(json.dumps(maybe, sort_keys=True, indent=4))
+            if "url" in maybe["http_body"]:
+                data = json.loads(maybe["http_body"])
+                url = data["url"]
+                sp = os.path.join(self.dpath, "%s.copy" % url2slug(url, 200))
+                if os.path.exists(sp):
+                    return
+                with open(sp, "wt") as f:
+                    logger.info("writing syndication copy %s to %s", url, sp)
+                    f.write(url)
+        except Exception as e:
+            logger.error(
+                "failed to fetch syndication URL for %s: %s",
+                self.dpath,
+                e
+            )
+            pass

     @property
     def exists(self):
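For reference, a minimal standalone sketch of the flow the new hunk implements: read a saved webmention.io/telegraph.p3k.io response, and write the syndicated URL from its "http_body" into a per-URL ".copy" file, skipping the write if that copy already exists. The sample payload, the slugify() stand-in for nasg's url2slug(), and the paths are hypothetical; only the "location", "http_body", and "url" keys come from the diff above.

    import json
    import os
    import re

    def slugify(url, maxlen=200):
        # hypothetical stand-in for nasg's url2slug(): lowercase, non-alphanumerics to "-"
        return re.sub(r"[^0-9a-z]+", "-", url.lower()).strip("-")[:maxlen]

    # hypothetical, already re-fetched telegraph.p3k.io-style payload, as read from self.fpath
    maybe = {
        "location": "https://telegraph.p3k.io/webmention/example",
        "http_body": json.dumps({"url": "https://twitter.com/example/status/123"}),
    }

    dpath = "content/example-entry"  # hypothetical entry directory

    if "url" in maybe["http_body"]:
        data = json.loads(maybe["http_body"])
        url = data["url"]
        # one ".copy" file per syndicated URL; skip if it already exists
        sp = os.path.join(dpath, "%s.copy" % slugify(url, 200))
        if not os.path.exists(sp):
            os.makedirs(dpath, exist_ok=True)
            with open(sp, "wt") as f:
                f.write(url)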