Peter Molnar 2017-11-10 16:04:05 +00:00
parent 07830313e4
commit aaaf494063
3 changed files with 111 additions and 98 deletions


@@ -17,8 +17,10 @@ from oauthlib.oauth2 import BackendApplicationClient
 import shared
 class LastFM(object):
     url = 'http://ws.audioscrobbler.com/2.0/'
     def __init__(self):
         self.service = 'lastfm'
         self.target = shared.config.get("api_%s" % self.service, 'logfile')
@@ -163,7 +165,6 @@ class FlickrFavs(Favs):
         parsed = json.loads(r.text)
         self.uid = parsed.get('user', {}).get('id')
     def getpaged(self, offset):
         logging.info('requesting page #%d of paginated results', offset)
         self.params.update({
@@ -199,6 +200,7 @@ class FlickrFavs(Favs):
             fav.run()
             # fav.fix_extension()
 class FivehpxFavs(Favs):
     def __init__(self):
         super(FivehpxFavs, self).__init__('500px')
@@ -234,7 +236,6 @@ class FivehpxFavs(Favs):
         g = js.get('galleries', []).pop()
         self.galid = g.get('id')
     @property
     def url(self):
         return 'https://api.500px.com/v1/users/%s/galleries/%s/items' % (
@@ -353,8 +354,8 @@ class DAFavs(Favs):
     @property
     def url(self):
-        return 'https://www.deviantart.com/api/v1/oauth2/collections/%s' % (self.galid)
+        return 'https://www.deviantart.com/api/v1/oauth2/collections/%s' % (
+            self.galid)
     def getpaged(self, offset):
         self.params.update({'offset': offset})
@@ -381,7 +382,7 @@ class DAFavs(Favs):
         try:
             meta = json.loads(r.text)
             return meta.get('metadata', []).pop()
-        except:
+        except BaseException:
             return meta
     def has_more(self, q):
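
This hunk, and several like it below, swaps a bare "except:" for "except BaseException:", which satisfies the linter but still catches everything. A minimal sketch of a narrower variant of the same lookup, assuming the intent is only to tolerate a malformed or empty metadata response; the helper name and the empty-dict fallback are illustrative, not part of the commit:

    import json

    def getsinglemeta_sketch(response_text):
        # Hypothetical variant: catch only what json.loads() and list.pop()
        # can raise here, and return an empty dict instead of an unbound local.
        try:
            meta = json.loads(response_text)
            return meta.get('metadata', []).pop()
        except (ValueError, IndexError):
            return {}
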
@@ -421,7 +422,8 @@ class DAFavs(Favs):
         for fav in self.favs:
             f = DAFav(fav)
             if not f.exists:
-                f.fav.update({'meta': self.getsinglemeta(fav.get('deviationid'))})
+                f.fav.update(
+                    {'meta': self.getsinglemeta(fav.get('deviationid'))})
             f.run()
             # f.fix_extension()
@@ -510,7 +512,7 @@ class ImgFav(object):
             '-XMP:ReleaseDate=%s' % dt.format('YYYY:MM:DD HH:mm:ss'),
             '-XMP:Headline=%s' % self.meta.get('title'),
             '-XMP:Description=%s' % self.content,
-        ];
+        ]
         for t in tags:
             params.append('-XMP:HierarchicalSubject+=%s' % t)
             params.append('-XMP:Subject+=%s' % t)
@@ -532,7 +534,7 @@ class ImgFav(object):
             params.append('-GPSLatitude=%s' % abs(geo_lat))
             params.append('-GPSLongitudeRef=%s' % GPSLongitudeRef)
             params.append('-GPSLatitudeRef=%s' % GPSLatitudeRef)
-        params.append(self.target);
+        params.append(self.target)
         p = subprocess.Popen(
             params,
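
The two ImgFav hunks above only drop stray semicolons from the exiftool argument list that write_exif() passes to subprocess. A minimal standalone sketch of that calling pattern, with the XMP tags taken from the diff; the function name and the '-overwrite_original' flag are assumptions, not the commit's code:

    import subprocess

    def write_xmp_sketch(target, headline, tags):
        # One '-XMP:<Tag>=<value>' argument per field, target file last,
        # matching the shape of the params list built above.
        params = ['exiftool', '-overwrite_original',
                  '-XMP:Headline=%s' % headline]
        for t in tags:
            params.append('-XMP:HierarchicalSubject+=%s' % t)
            params.append('-XMP:Subject+=%s' % t)
        params.append(target)
        p = subprocess.Popen(params, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        return p.returncode, stderr
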
@@ -553,7 +555,8 @@ class FlickrFav(ImgFav):
         self.photo = photo
         self.ownerid = photo.get('owner')
         self.photoid = photo.get('id')
-        self.url = "https://www.flickr.com/photos/%s/%s" % (self.ownerid, self.photoid)
+        self.url = "https://www.flickr.com/photos/%s/%s" % (
+            self.ownerid, self.photoid)
         self.target = os.path.join(
             shared.config.get('archive', 'favorite'),
             "flickr-%s-%s.jpg" % (self.ownerid, self.photoid)
@@ -567,20 +570,11 @@ class FlickrFav(ImgFav):
         # the bigger the better, see
         # https://www.flickr.com/services/api/misc.urls.html
-        img = self.photo.get(
-            'url_o',
-            self.photo.get('url_k',
-            self.photo.get('url_h',
-            self.photo.get('url_b',
-            self.photo.get('url_c',
-            self.photo.get('url_z',
-            False
-            )
-            )
-            )
-            )
-            )
-        )
+        img = False
+        for x in ['url_o', 'url_k', 'url_h', 'url_b', 'url_c', 'url_z']:
+            if x in self.photo:
+                img = self.photo.get(x)
+                break
         if not img:
             logging.error("image url was empty for %s, skipping fav", self.url)
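
The replacement loop walks Flickr's size keys from largest to smallest and keeps the first URL that exists. The same fallback as a standalone sketch, handy for checking the ordering in isolation; the function name is illustrative:

    def pick_flickr_url(photo):
        # Size keys in the order the Flickr URL docs list them, largest first;
        # return False when none of them is present in the photo dict.
        for key in ('url_o', 'url_k', 'url_h', 'url_b', 'url_c', 'url_z'):
            if key in photo:
                return photo[key]
        return False
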
@@ -617,6 +611,7 @@ class FlickrFav(ImgFav):
         self.fix_extension()
         self.write_exif()
 class FivehpxFav(ImgFav):
     def __init__(self, photo):
         self.photo = photo
@@ -663,6 +658,7 @@ class FivehpxFav(ImgFav):
         self.fix_extension()
         self.write_exif()
 class DAFav(ImgFav):
     def __init__(self, fav):
         self.fav = fav
@@ -690,7 +686,9 @@ class DAFav(ImgFav):
     def run(self):
         if not self.imgurl:
-            logging.error('imgurl is empty for deviantart %s', self.deviationid)
+            logging.error(
+                'imgurl is empty for deviantart %s',
+                self.deviationid)
             return
         self.pull_image()
@@ -823,7 +821,8 @@ class Oauth1Flow(object):
             self.request_oauth_token()
         t = self.tokendb.get_token(self.t)
-        if not t.get('access_token', None) or not t.get('access_token_secret', None):
+        if not t.get('access_token', None) or not t.get(
+                'access_token_secret', None):
             self.request_access_token()
     def request_oauth_token(self):
@@ -885,11 +884,11 @@ class Oauth1Flow(object):
                 access_token_secret=r.get('oauth_token_secret')
             )
         except oauth1_session.TokenRequestDenied as e:
-            logging.error('getting access token was denied, clearing former oauth tokens and re-running everyting')
+            logging.error(
+                'getting access token was denied, clearing former oauth tokens and re-running everyting')
             self.tokendb.clear_service(self.service)
             self.oauth_init()
     def request(self, url, params):
         t = self.tokendb.get_token(self.t)
         client = OAuth1Session(
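
Oauth1Flow keeps the request and access tokens in tokendb and restarts the whole flow when the access token request is denied. A minimal sketch of how a stored token pair would be used to sign a request with requests_oauthlib, assuming the same field names as the hunk above; the consumer key and secret are placeholders:

    from requests_oauthlib import OAuth1Session

    def signed_get(url, params, token):
        # token is expected to look like the tokendb entries used above:
        # {'access_token': ..., 'access_token_secret': ...}
        client = OAuth1Session(
            'CONSUMER_KEY',                    # placeholder
            client_secret='CONSUMER_SECRET',   # placeholder
            resource_owner_key=token.get('access_token'),
            resource_owner_secret=token.get('access_token_secret'),
        )
        return client.get(url, params=params)
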

nasg.py (15 changes)

@@ -64,6 +64,7 @@ class MagicPHP(object):
 class NoDupeContainer(object):
     ''' Base class to hold keys => data dicts with errors on dupes '''
     def __init__(self):
         self.data = {}
         self.default = None
@@ -110,7 +111,7 @@ class NoDupeContainer(object):
     def __next__(self):
         try:
             r = self.data.next()
-        except:
+        except BaseException:
             raise StopIteration()
         return r
@@ -123,6 +124,7 @@ class NoDupeContainer(object):
 class FContainer(NoDupeContainer):
     """ This is a container that holds a lists of files based on Container so
     it errors on duplicate slugs and is popolated with recorsive glob """
     def __init__(self, dirs, extensions=['*']):
         super().__init__()
         files = []
@@ -143,6 +145,7 @@ class Content(FContainer):
     """ This is a container that holds markdown files that are parsed when the
     container is populated on the fly; based on FContainer which is a Container
     """
     def __init__(self):
         dirs = [os.path.join(shared.config.get('dirs', 'content'), "**")]
         extensions = ['md', 'jpg']
@@ -492,7 +495,7 @@ class Singular(object):
                 self.meta.get('title', ''),
                 self.content
             ]))
-        except:
+        except BaseException:
             pass
         return lang
@@ -682,10 +685,10 @@ class WebImage(object):
         if self.is_downsizeable:
             try:
                 src = [
-                    e for e in self.sizes \
+                    e for e in self.sizes
                     if e[0] == shared.config.getint('photo', 'default')
                 ][0][1]['url']
-            except:
+            except BaseException:
                 pass
         return src
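
The comprehension above selects the entry of self.sizes whose width equals the configured default and silently keeps the previous src when there is no match. The same lookup as a plain loop, without the index-and-catch pattern; the function name and the explicit fallback argument are illustrative, not the commit's code:

    def default_size_url(sizes, default_width, fallback=None):
        # sizes is a list of (width, data) pairs; return the matching URL,
        # or the fallback when no size has the requested width.
        for width, data in sizes:
            if width == default_width:
                return data['url']
        return fallback
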
@@ -953,7 +956,8 @@ class WebImage(object):
         if not self.is_downsizeable:
             return self._copy()
-        if not self.needs_downsize and not shared.config.getboolean('params', 'regenerate'):
+        if not self.needs_downsize and not shared.config.getboolean(
+                'params', 'regenerate'):
             return
         build_files = os.path.join(
@@ -1335,5 +1339,6 @@ def build():
         logging.debug("copying static file %s to %s", s, d)
         shutil.copy2(s, d)
 if __name__ == '__main__':
     build()


@@ -10,6 +10,7 @@ import requests
 from slugify import slugify
 import jinja2
 class CMDLine(object):
     def __init__(self, executable):
         self.executable = self._which(executable)
@@ -80,6 +81,7 @@ class XRay(CMDLine):
         return json.loads(stdout.decode('utf-8').strip())
 class Pandoc(CMDLine):
     """ Pandoc command line call with piped in- and output """
@@ -233,7 +235,8 @@ class ExifTool(CMDLine):
         exif = json.loads(stdout.decode('utf-8').strip()).pop()
         if 'ReleaseDate' in exif and 'ReleaseTime' in exif:
-            exif['DateTimeRelease'] = "%s %s" % (exif.get('ReleaseDate'), exif.get('ReleaseTime')[:8])
+            exif['DateTimeRelease'] = "%s %s" % (
+                exif.get('ReleaseDate'), exif.get('ReleaseTime')[:8])
             del(exif['ReleaseDate'])
             del(exif['ReleaseTime'])
@@ -242,6 +245,7 @@ class ExifTool(CMDLine):
         return exif
 class BaseDB(object):
     def __init__(self, fpath):
         self.db = sqlite3.connect(fpath)
@@ -279,7 +283,6 @@ class BaseDB(object):
         # self.db.commit()
 # TODO class SearchDBng(object):
 # TODO class EXIFDBng(object):
@@ -355,8 +358,10 @@ class TokenDB(object):
         del(self.tokens[service])
         self.save()
 class SearchDB(BaseDB):
     tmplfile = 'Search.html'
     def __init__(self):
         self.fpath = "%s" % config.get('var', 'searchdb')
         super().__init__(self.fpath)
@@ -435,7 +440,6 @@ class SearchDB(BaseDB):
             if category not in ret:
                 ret.update({category: {}})
             maybe_fpath = os.path.join(
                 config.get('dirs', 'content'),
                 category,
@@ -452,7 +456,6 @@
             })
         return ret
     def cli(self, query):
         results = self.search_by_query(query)
         for c, items in sorted(results.items()):
@@ -509,7 +512,12 @@ class WebmentionQueue(BaseDB):
         logging.debug('getting queued webmentions for %s', fname)
         ret = []
         cursor = self.db.cursor()
-        cursor.execute('''SELECT * FROM queue WHERE target LIKE ? AND status = 0''', ('%'+fname+'%',))
+        cursor.execute(
+            '''SELECT * FROM queue WHERE target LIKE ? AND status = 0''',
+            ('%' +
+             fname +
+             '%',
+             ))
         rows = cursor.fetchall()
         for r in rows:
             ret.append({
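
Only the wrapping of the query above changed; the LIKE pattern is still passed as a bound parameter. A minimal standalone sketch of the same lookup against any sqlite3 connection that holds such a queue table; the function name is illustrative:

    def queued_webmentions(db, fname):
        # Build the '%<fname>%' pattern in Python and bind it as a parameter,
        # so fname is never interpolated into the SQL text itself.
        cursor = db.cursor()
        cursor.execute(
            'SELECT * FROM queue WHERE target LIKE ? AND status = 0',
            ('%' + fname + '%',)
        )
        return cursor.fetchall()
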
@@ -541,6 +549,7 @@ def __expandconfig():
             c.set(s, o, os.path.expanduser(curr))
     return c
 def baseN(num, b=36, numerals="0123456789abcdefghijklmnopqrstuvwxyz"):
     """ Used to create short, lowercase slug for a number (an epoch) passed """
     num = int(num)
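
baseN() converts an integer (typically an epoch) into a short lowercase base-36 slug; only its first lines are visible in this hunk. A sketch of an equivalent conversion written iteratively, not taken from the commit:

    def to_base36(num, numerals="0123456789abcdefghijklmnopqrstuvwxyz"):
        # Repeatedly divide by the base and map each remainder to a numeral.
        num = int(num)
        if num == 0:
            return numerals[0]
        digits = []
        while num:
            num, rem = divmod(num, len(numerals))
            digits.append(numerals[rem])
        return ''.join(reversed(digits))

    # e.g. to_base36(1510329845) == 'oz7lyt'
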
@@ -560,6 +569,7 @@ def slugfname(url):
         lower=True
     )[:200]
 def __setup_sitevars():
     SiteVars = {}
     section = 'site'
@@ -584,7 +594,6 @@ def __setup_sitevars():
     for o in config.options(section):
         SiteVars[section].update({o: config.get(section, o)})
     # push the whole thing into cache
     return SiteVars
@@ -604,7 +613,7 @@ def notify(msg):
     # fire and forget
     try:
         requests.post(url, data=data)
-    except:
+    except BaseException:
         pass