- formatting with black
- added YAML files in case I ever want to use the saved favs as entries in my site
- some tiny amount of code refactoring
parent c7f4aaf8dc, commit 5e5f2e2cf7
10 changed files with 350 additions and 389 deletions
Artstation.py

@@ -161,13 +161,6 @@ class ASLike(common.ImgFav):
             ),
         )

-    @property
-    def exists(self):
-        maybe = glob.glob("%s*" % self.targetprefix)
-        if len(maybe):
-            return True
-        return False
-
     @property
     def published(self):
         return arrow.get(self.like.get("published_at"))

@@ -193,10 +186,6 @@ class ASLike(common.ImgFav):
             cntr = cntr + 1
         return r

-    def run(self):
-        if not self.exists:
-            self.fetch_images()
-

 if __name__ == "__main__":
     t = ASFavs()
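The exists and run methods deleted here are not gone: this commit moves them into the shared common.ImgFav base class (see the common.py hunks further down), so each silo class only supplies data properties. A minimal sketch of the resulting pattern; SomeSiloFav and its property bodies are hypothetical placeholders, not code from this repository:

    import common

    class SomeSiloFav(common.ImgFav):
        def __init__(self, data):
            self.data = data

        @property
        def targetprefix(self):
            # archive path prefix; ImgFav.exists globs for "<targetprefix>*"
            return "/tmp/archive/favorite/somesilo_someauthor_1234"

        @property
        def images(self):
            # {temporary file path: remote image URL}, consumed by fetch_images()
            return {self.targetprefix + common.TMPFEXT: "https://example.com/img.jpg"}

    # ImgFav.run() then does: if not self.exists: fetch_images(); save_txt()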
DeviantArt.py (102 changed lines)

@@ -9,39 +9,39 @@ import settings
 from pprint import pprint
 import logging


 class DAFavs(common.Favs):
     def __init__(self):
-        super().__init__('deviantart')
+        super().__init__("deviantart")
         self.client = deviantart.Api(
-            keys.deviantart.get('key'),
-            keys.deviantart.get('secret'),
-            scope='user'
+            keys.deviantart.get("key"), keys.deviantart.get("secret"), scope="user"
         )
         self.favfolder = None

     @property
     def feeds(self):
-        logging.info('Generating OPML feeds for DeviantArt')
+        logging.info("Generating OPML feeds for DeviantArt")
         feeds = []
         offset = 0
         has_more = True
         while has_more:
-            logging.info('Generating OPML feeds for DeviantArt: offset %d' % offset)
+            logging.info("Generating OPML feeds for DeviantArt: offset %d" % offset)
             try:
                 following = self.client.get_friends(
-                    username=keys.deviantart.get('username'),
-                    offset=offset,
-                    limit=24
+                    username=keys.deviantart.get("username"), offset=offset, limit=24
                 )
-                offset = following.get('next_offset')
-                for follow in following.get('results'):
-                    u = follow.get('user').username.lower()
-                    feeds.append({
-                        'text': u,
-                        'xmlUrl': "https://backend.deviantart.com/rss.xml?q=gallery%%3A%s" % u,
-                        'htmlUrl': "https://www.deviantart.com/%s" % u
-                    })
-                has_more = following.get('has_more')
+                offset = following.get("next_offset")
+                for follow in following.get("results"):
+                    u = follow.get("user").username.lower()
+                    feeds.append(
+                        {
+                            "text": u,
+                            "xmlUrl": "https://backend.deviantart.com/rss.xml?q=gallery%%3A%s"
+                            % u,
+                            "htmlUrl": "https://www.deviantart.com/%s" % u,
+                        }
+                    )
+                has_more = following.get("has_more")
             except deviantart.api.DeviantartError as e:
                 print(e)
                 break

@@ -50,18 +50,16 @@ class DAFavs(common.Favs):
     def run(self):
         offset = 0
         while not self.favfolder:
-            logging.info('fetching for DeviantArt: offset %d' % offset)
+            logging.info("fetching for DeviantArt: offset %d" % offset)
             try:
                 folders = self.client.get_collections(
-                    username=keys.deviantart.get('username'),
-                    offset=offset,
-                    limit=24
+                    username=keys.deviantart.get("username"), offset=offset, limit=24
                 )
-                offset = folders.get('next_offset')
-                for r in folders.get('results'):
-                    if r.get('name') == 'Featured':
-                        self.favfolder = r.get('folderid')
-                if (folders.get('has_more') == False):
+                offset = folders.get("next_offset")
+                for r in folders.get("results"):
+                    if r.get("name") == "Featured":
+                        self.favfolder = r.get("folderid")
+                if folders.get("has_more") == False:
                     break
             except deviantart.api.DeviantartError as e:
                 print(e)

@@ -73,17 +71,17 @@ class DAFavs(common.Favs):
         try:
             fetched = self.client.get_collection(
                 self.favfolder,
-                username=keys.deviantart.get('username'),
+                username=keys.deviantart.get("username"),
                 offset=offset,
                 limit=24,
                 # mature_content=True
             )
-            for r in fetched.get('results'):
+            for r in fetched.get("results"):
                 fav = DAFav(r)
                 fav.run()
-            offset = fetched.get('next_offset')
-            has_more = fetched.get('has_more')
-            if (has_more == False):
+            offset = fetched.get("next_offset")
+            has_more = fetched.get("has_more")
+            if has_more == False:
                 break
         except deviantart.api.DeviantartError as e:
             print(e)

@@ -91,7 +89,7 @@ class DAFavs(common.Favs):


 class DAFav(common.ImgFav):
-    def __init__(self, deviation, ):
+    def __init__(self, deviation):
         self.deviation = deviation

     def __str__(self):

@@ -100,8 +98,10 @@ class DAFav(common.ImgFav):
     @property
     def author(self):
         return {
-            'name': self.deviation.author,
-            'url': 'http://%s.deviantart.com' % self.deviation.author
+            "name": self.deviation.author.username,
+            "id": self.deviation.author.userid,
+            "image": self.deviation.author.usericon,
+            "url": "http://%s.deviantart.com" % self.deviation.author.username,
         }

     @property

@@ -116,7 +116,7 @@ class DAFav(common.ImgFav):
     def content(self):
         if self.deviation.excerpt:
             return "%s" % self.deviation.excerpt
-        return ''
+        return ""

     @property
     def title(self):

@@ -128,21 +128,15 @@ class DAFav(common.ImgFav):
     @property
     def targetprefix(self):
         return os.path.join(
-            settings.paths.get('archive'),
-            'favorite',
-            "deviantart_%s_%s_%s" % (
-                common.slugfname('%s' % self.deviation.author),
+            settings.paths.get("archive"),
+            "favorite",
+            "deviantart_%s_%s_%s"
+            % (
+                common.slugfname("%s" % self.deviation.author),
                 self.id,
-                common.slugfname('%s' % self.title)
-            )
+                common.slugfname("%s" % self.title),
+            ),
         )

-    @property
-    def exists(self):
-        maybe = glob.glob("%s*" % self.targetprefix)
-        if len(maybe):
-            return True
-        return False
-
     @property
     def published(self):

@@ -155,15 +149,9 @@ class DAFav(common.ImgFav):
     @property
     def images(self):
         f = "%s%s" % (self.targetprefix, common.TMPFEXT)
-        return {
-            f: self.deviation.content.get('src')
-        }
-
-    def run(self):
-        if not self.exists:
-            self.fetch_images()
+        return {f: self.deviation.content.get("src")}


-if __name__ == '__main__':
+if __name__ == "__main__":
     t = DAFavs()
     t.run()
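The reformatted targetprefix keeps composing the same archive prefix as before; with made-up values it expands to something like this (illustration only, not output from the repository):

    import os

    archive = os.path.expanduser("~/Projects/petermolnar.net/archive")  # settings.paths["archive"]
    prefix = os.path.join(
        archive, "favorite", "deviantart_%s_%s_%s" % ("someartist", "1A2B3C", "some-title")
    )
    # -> ~/Projects/petermolnar.net/archive/favorite/deviantart_someartist_1A2B3C_some-title
    # glob.glob(prefix + "*") is how the shared `exists` check later finds the downloads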
Flickr.py (89 changed lines)

@@ -15,31 +15,29 @@ import logging

 class FlickrFavs(common.Favs):
     def __init__(self):
-        super().__init__('flickr')
+        super().__init__("flickr")
         flickr_api.set_keys(
-            api_key = keys.flickr.get('key'),
-            api_secret = keys.flickr.get('secret')
-        )
-        self.user = flickr_api.Person.findByUserName(
-            keys.flickr.get('username')
+            api_key=keys.flickr.get("key"), api_secret=keys.flickr.get("secret")
         )
+        self.user = flickr_api.Person.findByUserName(keys.flickr.get("username"))

     @property
     def feeds(self):
-        logging.info('Generating OPML feeds for Flickr')
+        logging.info("Generating OPML feeds for Flickr")
         feeds = []
         pages = 1
         page = 1
         while page <= pages:
-            fetched = self.user.getPublicContacts(
-                page=page
-            )
+            fetched = self.user.getPublicContacts(page=page)
             for u in fetched:
-                feeds.append({
-                    'text': u.username,
-                    'xmlUrl': "https://api.flickr.com/services/feeds/photos_public.gne?lang=en-us&format=rss_200&id=%s" % u.id,
-                    'htmlUrl': "https://www.flickr.com/photos/%s" % u.id
-                })
+                feeds.append(
+                    {
+                        "text": u.username,
+                        "xmlUrl": "https://api.flickr.com/services/feeds/photos_public.gne?lang=en-us&format=rss_200&id=%s"
+                        % u.id,
+                        "htmlUrl": "https://www.flickr.com/photos/%s" % u.id,
+                    }
+                )
             pages = fetched.info.pages
             page = page + 1
         return feeds

@@ -48,11 +46,9 @@ class FlickrFavs(common.Favs):
         pages = 1
         page = 1
         while page <= pages:
-            logging.info('fetching for Flickr: page %d' % page)
+            logging.info("fetching for Flickr: page %d" % page)
             fetched = self.user.getFavorites(
-                user_id=self.user.id,
-                page=page,
-                min_fave_date=self.since
+                user_id=self.user.id, page=page, min_fave_date=self.since
             )
             for p in fetched:
                 photo = FlickrFav(p)

@@ -70,7 +66,7 @@ class FlickrFav(common.ImgFav):

     @cached_property
     def owner(self):
-        return self.info.get('owner')
+        return self.info.get("owner")

     @cached_property
     def info(self):

@@ -79,82 +75,63 @@ class FlickrFav(common.ImgFav):
     @property
     def author(self):
         return {
-            'name': "%s" % self.owner.username,
-            'url': "%s" % self.owner.getProfileUrl(),
+            "name": "%s" % self.owner.username,
+            "url": "%s" % self.owner.getProfileUrl(),
         }

     @property
     def id(self):
-        return "%s" % self.info.get('id')
+        return "%s" % self.info.get("id")

     @property
     def url(self):
-        return "https://www.flickr.com/photos/%s/%s/" % (
-            self.owner.id,
-            self.id
-        )
+        return "https://www.flickr.com/photos/%s/%s/" % (self.owner.id, self.id)

     @property
     def content(self):
-        return "%s" % self.info.get('description')
+        return "%s" % self.info.get("description")

     @property
     def geo(self):
-        if 'location' not in self.info:
+        if "location" not in self.info:
             return None

-        lat = self.info.get('location').get('latitude', None)
-        lon = self.info.get('location').get('longitude', None)
+        lat = self.info.get("location").get("latitude", None)
+        lon = self.info.get("location").get("longitude", None)
         return (lat, lon)

     @property
     def title(self):
-        return clean(''.strip("%s" % self.info.get('title')))
+        return clean("".strip("%s" % self.info.get("title")))

     @property
     def targetprefix(self):
         return os.path.join(
-            settings.paths.get('archive'),
-            'favorite',
-            "flickr_%s_%s" % (
-                common.slugfname('%s' % self.owner.id),
-                self.id,
-            )
+            settings.paths.get("archive"),
+            "favorite",
+            "flickr_%s_%s" % (common.slugfname("%s" % self.owner.id), self.id),
         )

-    @property
-    def exists(self):
-        maybe = glob.glob("%s*" % self.targetprefix)
-        if len(maybe):
-            return True
-        return False
-
     @property
     def published(self):
-        return arrow.get(self.info.get('dateuploaded'))
+        return arrow.get(self.info.get("dateuploaded"))

     @property
     def tags(self):
         tags = []
-        for t in self.info.get('tags'):
+        for t in self.info.get("tags"):
             tags.append("%s" % t.text)
         return tags

     @property
     def images(self):
         sizes = self.flickrphoto.getSizes()
-        for maybe in ['Original', 'Large 2048', 'Large 1600', 'Large', 'Medium']:
+        for maybe in ["Original", "Large 2048", "Large 1600", "Large", "Medium"]:
             if maybe in sizes:
                 f = "%s%s" % (self.targetprefix, common.TMPFEXT)
-                return {
-                    f: sizes.get(maybe).get('source')
-                }
-
-    def run(self):
-        if not self.exists:
-            self.fetch_images()
+                return {f: sizes.get(maybe).get("source")}


-if __name__ == '__main__':
+if __name__ == "__main__":
     t = FlickrFavs()
     t.run()
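FlickrFav.images keeps its size preference: getSizes() returns a mapping keyed by size label, and the first label found in the preference list wins. A toy run of that loop with a made-up sizes dict:

    sizes = {
        "Large 1600": {"source": "https://live.example.com/photo_h.jpg"},
        "Medium": {"source": "https://live.example.com/photo.jpg"},
    }
    for maybe in ["Original", "Large 2048", "Large 1600", "Large", "Medium"]:
        if maybe in sizes:
            print(sizes.get(maybe).get("source"))  # "Large 1600" wins here
            break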
LastFM.py (69 changed lines)

@@ -11,8 +11,7 @@ import keys
 from pprint import pprint

 Track = namedtuple(
-    'Track',
-    ['timestamp', 'artist', 'album', 'title', 'artistid', 'albumid', 'img']
+    "Track", ["timestamp", "artist", "album", "title", "artistid", "albumid", "img"]
 )


@@ -36,37 +35,34 @@ class cached_property(object):


 class LastFM(object):
-    url = 'http://ws.audioscrobbler.com/2.0/'
+    url = "http://ws.audioscrobbler.com/2.0/"

     def __init__(self):
         self.params = {
-            'method': 'user.getrecenttracks',
-            'user': keys.lastfm.get('username'),
-            'api_key': keys.lastfm.get('key'),
-            'format': 'json',
-            'limit': '200'
+            "method": "user.getrecenttracks",
+            "user": keys.lastfm.get("username"),
+            "api_key": keys.lastfm.get("key"),
+            "format": "json",
+            "limit": "200",
         }
         if os.path.isfile(self.target):
             mtime = os.path.getmtime(self.target)
-            self.params.update({'from': mtime})
+            self.params.update({"from": mtime})

     @property
     def target(self):
-        return os.path.join(
-            settings.paths.get('archive'),
-            'lastfm.csv'
-        )
+        return os.path.join(settings.paths.get("archive"), "lastfm.csv")

     @cached_property
     def existing(self):
         timestamps = []
-        with open(self.target, 'r') as f:
+        with open(self.target, "r") as f:
             r = csv.reader(f)
             for row in r:
                 try:
                     timestamps.append(arrow.get(row[0]).timestamp)
                 except Exception as e:
-                    logging.error('arrow failed on row %s', row)
+                    logging.error("arrow failed on row %s", row)
                     continue
         return timestamps

@@ -78,38 +74,37 @@ class LastFM(object):
         tracks = []
         if not data:
             return tracks
-        for track in data.get('track', []):
-            if 'date' not in track:
+        for track in data.get("track", []):
+            if "date" not in track:
                 continue
-            ts = arrow.get(int(track.get('date').get('uts')))
+            ts = arrow.get(int(track.get("date").get("uts")))
             if ts.timestamp in self.existing:
                 continue
             entry = Track(
-                ts.format('YYYY-MM-DDTHH:mm:ssZ'),
-                track.get('artist').get('#text', ''),
-                track.get('album').get('#text', ''),
-                track.get('name', ''),
-                track.get('artist').get('mbid', ''),
-                track.get('album').get('mbid', ''),
-                track.get('image', [])[-1].get('#text', ''),
+                ts.format("YYYY-MM-DDTHH:mm:ssZ"),
+                track.get("artist").get("#text", ""),
+                track.get("album").get("#text", ""),
+                track.get("name", ""),
+                track.get("artist").get("mbid", ""),
+                track.get("album").get("mbid", ""),
+                track.get("image", [])[-1].get("#text", ""),
             )
             tracks.append(entry)
         return tracks

     def fetch(self):
         r = requests.get(self.url, params=self.params)
-        return json.loads(r.text).get('recenttracks')
-
+        return json.loads(r.text).get("recenttracks")

     def run(self):
         try:
             data = self.fetch()
             tracks = self.extracttracks(data)
-            total = int(data.get('@attr').get('totalPages'))
-            current = int(data.get('@attr').get('page'))
+            total = int(data.get("@attr").get("totalPages"))
+            current = int(data.get("@attr").get("page"))
             cntr = total - current
         except Exception as e:
-            logging.error('Something went wrong: %s', e)
+            logging.error("Something went wrong: %s", e)
             return

         if not len(tracks):

@@ -118,24 +113,22 @@ class LastFM(object):
         while cntr > 0:
             current = current + 1
             cntr = total - current
-            logging.info('requesting page #%d of paginated results', current)
-            self.params.update({
-                'page': current
-            })
+            logging.info("requesting page #%d of paginated results", current)
+            self.params.update({"page": current})
             data = self.fetch()
             tracks = tracks + self.extracttracks(data)

         if not self.exists:
-            with open(self.target, 'w') as f:
+            with open(self.target, "w") as f:
                 writer = csv.DictWriter(f, fieldnames=Track._fields)
                 writer.writeheader()

         if len(tracks):
-            with open(self.target, 'a') as f:
+            with open(self.target, "a") as f:
                 writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
-                writer.writerows(sorted(tracks, key=attrgetter('timestamp')))
+                writer.writerows(sorted(tracks, key=attrgetter("timestamp")))


-if __name__ == '__main__':
+if __name__ == "__main__":
     lfm = LastFM()
     lfm.run()
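Each Track still ends up as one row of archive/lastfm.csv, quoted by QUOTE_NONNUMERIC since every field is a string. A sketch with hypothetical values, purely for illustration:

    import csv

    row = (
        "2019-06-30T12:34:56+00:00",      # timestamp: ts.format("YYYY-MM-DDTHH:mm:ssZ")
        "Some Artist",                    # artist
        "Some Album",                     # album
        "Some Title",                     # title
        "artist-mbid",                    # artistid
        "album-mbid",                     # albumid
        "https://example.com/cover.jpg",  # img
    )
    with open("/tmp/lastfm.csv", "a") as f:
        csv.writer(f, quoting=csv.QUOTE_NONNUMERIC).writerow(row)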
Pipfile (1 changed line)

@@ -14,6 +14,7 @@ bleach = "*"
 deviantart = "*"
 flickr-api = "*"
 pytumblr = "*"
+pyyaml = "*"

 [requires]
 python_version = "3.7"
Pipfile.lock (generated, 128 changed lines)

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "bcf7b4826cf074e5bbb52fdccf3cec966ce0eeded6c38dfd39f530f3aa223cde"
+            "sha256": "654f2f42d6d9e3dd3aaf13b371369e3943573472fc93786661eff68d965dcb8b"
         },
         "pipfile-spec": 6,
         "requires": {

@@ -18,10 +18,11 @@
     "default": {
         "arrow": {
             "hashes": [
-                "sha256:9cb4a910256ed536751cd5728673bfb53e6f0026e240466f90c2a92c0b79c895"
+                "sha256:03404b624e89ac5e4fc19c52045fa0f3203419fd4dd64f6e8958c522580a574a",
+                "sha256:41be7ea4c53c2cf57bf30f2d614f60c411160133f7a0a8c49111c30fb7e725b5"
             ],
             "index": "pypi",
-            "version": "==0.13.0"
+            "version": "==0.14.2"
         },
         "bleach": {
             "hashes": [

@@ -33,10 +34,10 @@
         },
         "certifi": {
             "hashes": [
-                "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
-                "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
+                "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+                "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
             ],
-            "version": "==2018.11.29"
+            "version": "==2019.6.16"
         },
         "chardet": {
             "hashes": [

@@ -54,10 +55,11 @@
         },
         "flickr-api": {
             "hashes": [
-                "sha256:57ba6845ad891f32144791d9be1e44a9d76afe9b8e32f1034b956dd38718aa79"
+                "sha256:2ff036ce4ca6f9be71a90310be80916b44feaeb95df5c1a9e5f57d49b64032c9",
+                "sha256:b9782c06315946b395d7f1b1e051fa2ff6aab4b21c5e82b1d95c04d7295f5f24"
             ],
             "index": "pypi",
-            "version": "==0.6.1"
+            "version": "==0.7.3"
         },
         "future": {
             "hashes": [

@@ -67,9 +69,10 @@
         },
         "httplib2": {
             "hashes": [
-                "sha256:f61fb838a94ce3b349aa32c92fd8430f7e3511afdb18bf9640d647e30c90a6d6"
+                "sha256:158fbd0ffbba536829d664bf3f32c4f45df41f8f791663665162dfaf21ffd075",
+                "sha256:d1146939d270f1f1eb8cbf8f5aa72ff37d897faccca448582bb1e180aeb4c6b2"
             ],
-            "version": "==0.12.0"
+            "version": "==0.13.0"
         },
         "idna": {
             "hashes": [

@@ -80,35 +83,33 @@
         },
         "lxml": {
             "hashes": [
-                "sha256:0dd6589fa75d369ba06d2b5f38dae107f76ea127f212f6a7bee134f6df2d1d21",
-                "sha256:1afbac344aa68c29e81ab56c1a9411c3663157b5aee5065b7fa030b398d4f7e0",
-                "sha256:1baad9d073692421ad5dbbd81430aba6c7f5fdc347f03537ae046ddf2c9b2297",
-                "sha256:1d8736421a2358becd3edf20260e41a06a0bf08a560480d3a5734a6bcbacf591",
-                "sha256:1e1d9bddc5afaddf0de76246d3f2152f961697ad7439c559f179002682c45801",
-                "sha256:1f179dc8b2643715f020f4d119d5529b02cd794c1c8f305868b73b8674d2a03f",
-                "sha256:241fb7bdf97cb1df1edfa8f0bcdfd80525d4023dac4523a241907c8b2f44e541",
-                "sha256:2f9765ee5acd3dbdcdc0d0c79309e01f7c16bc8d39b49250bf88de7b46daaf58",
-                "sha256:312e1e1b1c3ce0c67e0b8105317323e12807955e8186872affb667dbd67971f6",
-                "sha256:3273db1a8055ca70257fd3691c6d2c216544e1a70b673543e15cc077d8e9c730",
-                "sha256:34dfaa8c02891f9a246b17a732ca3e99c5e42802416628e740a5d1cb2f50ff49",
-                "sha256:3aa3f5288af349a0f3a96448ebf2e57e17332d99f4f30b02093b7948bd9f94cc",
-                "sha256:51102e160b9d83c1cc435162d90b8e3c8c93b28d18d87b60c56522d332d26879",
-                "sha256:56115fc2e2a4140e8994eb9585119a1ae9223b506826089a3ba753a62bd194a6",
-                "sha256:69d83de14dbe8fe51dccfd36f88bf0b40f5debeac763edf9f8325180190eba6e",
-                "sha256:99fdce94aeaa3ccbdfcb1e23b34273605c5853aa92ec23d84c84765178662c6c",
-                "sha256:a7c0cd5b8a20f3093ee4a67374ccb3b8a126743b15a4d759e2a1bf098faac2b2",
-                "sha256:abe12886554634ed95416a46701a917784cb2b4c77bfacac6916681d49bbf83d",
-                "sha256:b4f67b5183bd5f9bafaeb76ad119e977ba570d2b0e61202f534ac9b5c33b4485",
-                "sha256:bdd7c1658475cc1b867b36d5c4ed4bc316be8d3368abe03d348ba906a1f83b0e",
-                "sha256:c6f24149a19f611a415a51b9bc5f17b6c2f698e0d6b41ffb3fa9f24d35d05d73",
-                "sha256:d1e111b3ab98613115a208c1017f266478b0ab224a67bc8eac670fa0bad7d488",
-                "sha256:d6520aa965773bbab6cb7a791d5895b00d02cf9adc93ac2bf4edb9ac1a6addc5",
-                "sha256:dd185cde2ccad7b649593b0cda72021bc8a91667417001dbaf24cd746ecb7c11",
-                "sha256:de2e5b0828a9d285f909b5d2e9d43f1cf6cf21fe65bc7660bdaa1780c7b58298",
-                "sha256:f726444b8e909c4f41b4fde416e1071cf28fa84634bfb4befdf400933b6463af"
+                "sha256:06c7616601430aa140a69f97e3116308fffe0848f543b639a5ec2e8920ae72fd",
+                "sha256:177202792f9842374a8077735c69c41a4282183f7851443d2beb8ee310720819",
+                "sha256:19317ad721ceb9e39847d11131903931e2794e447d4751ebb0d9236f1b349ff2",
+                "sha256:36d206e62f3e5dbaafd4ec692b67157e271f5da7fd925fda8515da675eace50d",
+                "sha256:387115b066c797c85f9861a9613abf50046a15aac16759bc92d04f94acfad082",
+                "sha256:3ce1c49d4b4a7bc75fb12acb3a6247bb7a91fe420542e6d671ba9187d12a12c2",
+                "sha256:4d2a5a7d6b0dbb8c37dab66a8ce09a8761409c044017721c21718659fa3365a1",
+                "sha256:58d0a1b33364d1253a88d18df6c0b2676a1746d27c969dc9e32d143a3701dda5",
+                "sha256:62a651c618b846b88fdcae0533ec23f185bb322d6c1845733f3123e8980c1d1b",
+                "sha256:69ff21064e7debc9b1b1e2eee8c2d686d042d4257186d70b338206a80c5bc5ea",
+                "sha256:7060453eba9ba59d821625c6af6a266bd68277dce6577f754d1eb9116c094266",
+                "sha256:7d26b36a9c4bce53b9cfe42e67849ae3c5c23558bc08363e53ffd6d94f4ff4d2",
+                "sha256:83b427ad2bfa0b9705e02a83d8d607d2c2f01889eb138168e462a3a052c42368",
+                "sha256:923d03c84534078386cf50193057aae98fa94cace8ea7580b74754493fda73ad",
+                "sha256:b773715609649a1a180025213f67ffdeb5a4878c784293ada300ee95a1f3257b",
+                "sha256:baff149c174e9108d4a2fee192c496711be85534eab63adb122f93e70aa35431",
+                "sha256:bca9d118b1014b4c2d19319b10a3ebed508ff649396ce1855e1c96528d9b2fa9",
+                "sha256:ce580c28845581535dc6000fc7c35fdadf8bea7ccb57d6321b044508e9ba0685",
+                "sha256:d34923a569e70224d88e6682490e24c842907ba2c948c5fd26185413cbe0cd96",
+                "sha256:dd9f0e531a049d8b35ec5e6c68a37f1ba6ec3a591415e6804cbdf652793d15d7",
+                "sha256:ecb805cbfe9102f3fd3d2ef16dfe5ae9e2d7a7dfbba92f4ff1e16ac9784dbfb0",
+                "sha256:ede9aad2197a0202caff35d417b671f5f91a3631477441076082a17c94edd846",
+                "sha256:ef2d1fc370400e0aa755aab0b20cf4f1d0e934e7fd5244f3dd4869078e4942b9",
+                "sha256:f2fec194a49bfaef42a548ee657362af5c7a640da757f6f452a35da7dd9f923c"
             ],
             "index": "pypi",
-            "version": "==4.3.0"
+            "version": "==4.3.4"
         },
         "oauth2": {
             "hashes": [

@@ -119,33 +120,50 @@
         },
         "oauthlib": {
             "hashes": [
-                "sha256:0ce32c5d989a1827e3f1148f98b9085ed2370fc939bf524c9c851d8714797298",
-                "sha256:3e1e14f6cde7e5475128d30e97edc3bfb4dc857cb884d8714ec161fdbb3b358e"
+                "sha256:40a63637707e9163eda62d0f5345120c65e001a790480b8256448543c1f78f66",
+                "sha256:b4d99ae8ccfb7d33ba9591b59355c64eef5241534aa3da2e4c0435346b84bc8e"
             ],
-            "version": "==3.0.1"
+            "version": "==3.0.2"
         },
         "python-dateutil": {
             "hashes": [
-                "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
-                "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
+                "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+                "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
             ],
-            "version": "==2.7.5"
+            "version": "==2.8.0"
         },
         "pytumblr": {
             "hashes": [
-                "sha256:ce0ba73f27237d1ef7374950b46bb8c4b13d68e6529f733ebc63799c4607ffec",
-                "sha256:d7496f966c0b42e8d8598c60b01a089d89670deb1f80d6b557168d706a428712"
+                "sha256:a3774d3978bcff2db98f36a2e5d17bb8496ac21157b1b518089adad86d0dca72",
+                "sha256:eaa4d98217df7ab6392fa5d8801f4a2bdcba35bf0fd49328aa3c98e3b231b6f2"
             ],
             "index": "pypi",
-            "version": "==0.0.8"
+            "version": "==0.1.0"
         },
+        "pyyaml": {
+            "hashes": [
+                "sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3",
+                "sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043",
+                "sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7",
+                "sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265",
+                "sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391",
+                "sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778",
+                "sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225",
+                "sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955",
+                "sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e",
+                "sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190",
+                "sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd"
+            ],
+            "index": "pypi",
+            "version": "==5.1.1"
+        },
         "requests": {
             "hashes": [
-                "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
-                "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+                "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+                "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
             ],
             "index": "pypi",
-            "version": "==2.21.0"
+            "version": "==2.22.0"
         },
         "requests-oauthlib": {
             "hashes": [

@@ -177,17 +195,17 @@
         },
         "unidecode": {
             "hashes": [
-                "sha256:092cdf7ad9d1052c50313426a625b717dab52f7ac58f859e09ea020953b1ad8f",
-                "sha256:8b85354be8fd0c0e10adbf0675f6dc2310e56fda43fa8fe049123b6c475e52fb"
+                "sha256:1d7a042116536098d05d599ef2b8616759f02985c85b4fef50c78a5aaf10822a",
+                "sha256:2b6aab710c2a1647e928e36d69c21e76b453cd455f4e2621000e54b2a9b8cce8"
             ],
-            "version": "==1.0.23"
+            "version": "==1.1.1"
         },
         "urllib3": {
             "hashes": [
-                "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
-                "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
+                "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+                "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
            ],
-            "version": "==1.24.1"
+            "version": "==1.25.3"
         },
         "webencodings": {
             "hashes": [
Tumblr.py (93 changed lines)

@@ -12,50 +12,56 @@ from pprint import pprint

 class TumblrFavs(common.Favs):
     def __init__(self):
-        super().__init__('tumblr')
+        super().__init__("tumblr")
         self.client = pytumblr.TumblrRestClient(
-            keys.tumblr.get('key'),
-            keys.tumblr.get('secret'),
-            keys.tumblr.get('oauth_token'),
-            keys.tumblr.get('oauth_secret')
+            keys.tumblr.get("key"),
+            keys.tumblr.get("secret"),
+            keys.tumblr.get("oauth_token"),
+            keys.tumblr.get("oauth_secret"),
         )

     @property
     def feeds(self):
-        logging.info('Generating OPML feeds for Tumblr')
+        logging.info("Generating OPML feeds for Tumblr")
         feeds = []
         offset = 0
         has_more = True
         while has_more:
             fetched = self.client.following(offset=offset)
-            if '_links' in fetched and 'next' in fetched['_links'] and len(fetched):
-                offset = fetched.get('_links').get('next').get('query_params').get('offset')
+            if "_links" in fetched and "next" in fetched["_links"] and len(fetched):
+                offset = (
+                    fetched.get("_links").get("next").get("query_params").get("offset")
+                )
             else:
                 has_more = False

-            for u in fetched.get('blogs'):
-                feeds.append({
-                    'text': u.get('name'),
-                    'xmlUrl': "%srss" % u.get('url'),
-                    'htmlUrl': u.get('url')
-                })
+            for u in fetched.get("blogs"):
+                feeds.append(
+                    {
+                        "text": u.get("name"),
+                        "xmlUrl": "%srss" % u.get("url"),
+                        "htmlUrl": u.get("url"),
+                    }
+                )
         return feeds

     def run(self):
         has_more = True
         after = self.since
         while has_more:
-            logging.info('fetching for Tumblr: after %d' % after)
+            logging.info("fetching for Tumblr: after %d" % after)
             fetched = self.client.likes(after=after)
-            if 'liked_posts' not in fetched:
+            if "liked_posts" not in fetched:
                 has_more = False
-            elif '_links' in fetched and 'prev' in fetched['_links'] and len(fetched):
-                after = fetched.get('_links').get('prev').get('query_params').get('after')
+            elif "_links" in fetched and "prev" in fetched["_links"] and len(fetched):
+                after = (
+                    fetched.get("_links").get("prev").get("query_params").get("after")
+                )
                 after = int(after)
             else:
                 has_more = False

-            for like in fetched.get('liked_posts'):
+            for like in fetched.get("liked_posts"):
                 fav = TumblrFav(like)
                 fav.run()


@@ -69,25 +75,25 @@ class TumblrFav(common.ImgFav):

     @property
     def blogname(self):
-        return self.data.get('blog_name')
+        return self.data.get("blog_name")

     @property
     def id(self):
-        return self.data.get('id')
+        return self.data.get("id")

     @property
     def url(self):
-        return self.data.get('post_url')
+        return self.data.get("post_url")

     @property
     def content(self):
-        return "%s" % self.data.get('caption', '')
+        return "%s" % self.data.get("caption", "")

     @property
     def title(self):
-        title = self.data.get('summary', '')
+        title = self.data.get("summary", "")
         if not len(title):
-            title = self.data.get('slug', '')
+            title = self.data.get("slug", "")
         if not len(title):
             title = common.slugfname(self.url)
         return clean(title.strip())

@@ -95,56 +101,39 @@ class TumblrFav(common.ImgFav):
     @property
     def targetprefix(self):
         return os.path.join(
-            settings.paths.get('archive'),
-            'favorite',
-            "tumblr_%s_%s" % (self.blogname, self.id)
+            settings.paths.get("archive"),
+            "favorite",
+            "tumblr_%s_%s" % (self.blogname, self.id),
         )

-    @property
-    def exists(self):
-        maybe = glob.glob("%s*" % self.targetprefix)
-        if len(maybe):
-            return True
-        return False
-
     @property
     def published(self):
-        maybe = self.data.get('liked_timestamp', False)
+        maybe = self.data.get("liked_timestamp", False)
         if not maybe:
-            maybe = self.data.get('date', False)
+            maybe = self.data.get("date", False)
         if not maybe:
             maybe = arrow.utcnow().timestamp
         return arrow.get(maybe)

     @property
     def tags(self):
-        return self.data.get('tags', [])
+        return self.data.get("tags", [])

     @property
     def author(self):
-        return {
-            'name': self.blogname,
-            'url': 'http://%s.tumblr.com' % self.blogname
-        }
+        return {"name": self.blogname, "url": "http://%s.tumblr.com" % self.blogname}

     @property
     def images(self):
         r = {}
         cntr = 0
-        for p in self.data.get('photos', []):
+        for p in self.data.get("photos", []):
             f = "%s_%d%s" % (self.targetprefix, cntr, common.TMPFEXT)
-            r.update({
-                f: p.get('original_size').get('url')
-            })
+            r.update({f: p.get("original_size").get("url")})
             cntr = cntr + 1
         return r

-    def run(self):
-        if not self.exists:
-            self.fetch_images()
-

-if __name__ == '__main__':
+if __name__ == "__main__":
     t = TumblrFavs()
     t.run()
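Both Tumblr loops above page by pulling the cursor out of the response's _links block; condensed, the handling looks like this (the fetched dict mirrors the shape of a pytumblr likes() response, values made up):

    has_more = True
    fetched = {
        "liked_posts": [],
        "_links": {"prev": {"query_params": {"after": "1561889000"}}},
    }
    if "liked_posts" not in fetched:
        has_more = False
    elif "_links" in fetched and "prev" in fetched["_links"] and len(fetched):
        # the API hands the cursor back as a string, hence the int() guard
        after = int(fetched.get("_links").get("prev").get("query_params").get("after"))
    else:
        has_more = False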
common.py (198 changed lines)

@@ -14,16 +14,23 @@ from requests.auth import HTTPBasicAuth
 import arrow
 import settings
 import keys
+import yaml
 from pprint import pprint

-TMPFEXT = '.xyz'
+TMPFEXT = ".xyz"
+MDFEXT = ".md"
+
+
+def utfyamldump(data):
+    """ dump YAML with actual UTF-8 chars """
+    return yaml.dump(data, default_flow_style=False, indent=4, allow_unicode=True)


 def slugfname(url):
-    return slugify(
-        re.sub(r"^https?://(?:www)?", "", url),
-        only_ascii=True,
-        lower=True
-    )[:200]
+    return slugify(re.sub(r"^https?://(?:www)?", "", url), only_ascii=True, lower=True)[
+        :200
+    ]


 class cached_property(object):
     """ extermely simple cached_property decorator:

@@ -31,6 +38,7 @@ class cached_property(object):
     result is calculated, then the class method is overwritten to be
     a property, contaning the result from the method
     """
+
     def __init__(self, method, name=None):
         self.method = method
         self.name = name or method.__name__

@@ -42,52 +50,42 @@ class Follows(dict):
         setattr(inst, self.name, result)
         return result


 class Follows(dict):
     def __init__(self):
         self.auth = HTTPBasicAuth(
-            keys.miniflux.get('username'),
-            keys.miniflux.get('token')
+            keys.miniflux.get("username"), keys.miniflux.get("token")
         )

     @property
     def subscriptions(self):
         feeds = []
-        params = {
-            'jsonrpc': '2.0',
-            'method': 'getFeeds',
-            'id': keys.miniflux.get('id')
-        }
+        params = {"jsonrpc": "2.0", "method": "getFeeds", "id": keys.miniflux.get("id")}
         r = requests.post(
-            keys.miniflux.get('url'),
-            data=json.dumps(params),
-            auth=self.auth
+            keys.miniflux.get("url"), data=json.dumps(params), auth=self.auth
         )
-        return r.json().get('result', [])
-
+        return r.json().get("result", [])

     def sync(self):
         current = []
         for feed in self.subscriptions:
             try:
-                current.append(feed['feed_url'])
+                current.append(feed["feed_url"])
             except Exception as e:
-                logging.error('problem with feed entry: %s', feed)
+                logging.error("problem with feed entry: %s", feed)
         for silo, feeds in self.items():
             for feed in feeds:
-                xmlurl = feed.get('xmlUrl')
+                xmlurl = feed.get("xmlUrl")
                 if len(xmlurl) and xmlurl not in current:
-                    logging.info('creating subscription for: %s', feed)
+                    logging.info("creating subscription for: %s", feed)
                     params = {
-                        'jsonrpc': '2.0',
-                        'method': 'createFeed',
-                        'id': keys.miniflux.get('id'),
-                        'params': {
-                            'url': xmlurl,
-                            'group_name': silo
-                        }
+                        "jsonrpc": "2.0",
+                        "method": "createFeed",
+                        "id": keys.miniflux.get("id"),
+                        "params": {"url": xmlurl, "group_name": silo},
                     }
                     r = requests.post(
-                        keys.miniflux.get('url'),
+                        keys.miniflux.get("url"),
                         data=json.dumps(params),
                         auth=self.auth,
                     )

@@ -98,59 +96,52 @@ class Follows(dict):
         opml.addprevious(
             etree.ProcessingInstruction(
                 "xml-stylesheet",
-                'type="text/xsl" href="%s"' % (settings.opml.get('xsl'))
+                'type="text/xsl" href="%s"' % (settings.opml.get("xsl")),
             )
         )
         head = etree.SubElement(opml, "head")
-        title = etree.SubElement(head, "title").text = settings.opml.get('title')
-        dt = etree.SubElement(head, "dateCreated").text = arrow.utcnow().format('ddd, DD MMM YYYY HH:mm:ss UTC')
-        owner = etree.SubElement(head, "ownerName").text = settings.opml.get('owner')
-        email = etree.SubElement(head, "ownerEmail").text = settings.opml.get('email')
+        title = etree.SubElement(head, "title").text = settings.opml.get("title")
+        dt = etree.SubElement(head, "dateCreated").text = arrow.utcnow().format(
+            "ddd, DD MMM YYYY HH:mm:ss UTC"
+        )
+        owner = etree.SubElement(head, "ownerName").text = settings.opml.get("owner")
+        email = etree.SubElement(head, "ownerEmail").text = settings.opml.get("email")

         body = etree.SubElement(opml, "body")
         groups = {}
         for feed in self.subscriptions:
             # contains sensitive data, skip it
-            if 'sessionid' in feed.get('feed_url') or 'sessionid' in feed.get('site_url'):
+            if "sessionid" in feed.get("feed_url") or "sessionid" in feed.get(
+                "site_url"
+            ):
                 continue

-            fgroup = feed.get('groups',None)
+            fgroup = feed.get("groups", None)
             if not fgroup:
-                fgroup = [{
-                    'title': 'Unknown',
-                    'id': -1
-                }]
+                fgroup = [{"title": "Unknown", "id": -1}]
             fgroup = fgroup.pop()
             # some groups need to be skipped
-            if fgroup['title'].lower() in ['private']:
+            if fgroup["title"].lower() in ["private"]:
                 continue
-            if fgroup['title'] not in groups.keys():
-                groups[fgroup['title']] = etree.SubElement(
-                    body,
-                    "outline",
-                    text=fgroup['title']
+            if fgroup["title"] not in groups.keys():
+                groups[fgroup["title"]] = etree.SubElement(
+                    body, "outline", text=fgroup["title"]
                 )
             entry = etree.SubElement(
-                groups[fgroup['title']],
+                groups[fgroup["title"]],
                 "outline",
                 type="rss",
-                text=feed.get('title'),
-                xmlUrl=feed.get('feed_url'),
-                htmlUrl=feed.get('site_url')
+                text=feed.get("title"),
+                xmlUrl=feed.get("feed_url"),
+                htmlUrl=feed.get("site_url"),
             )

-        opmlfile = os.path.join(
-            settings.paths.get('content'),
-            'following.opml'
-        )
+        opmlfile = os.path.join(settings.paths.get("content"), "following.opml")

-        with open(opmlfile, 'wb') as f:
+        with open(opmlfile, "wb") as f:
             f.write(
                 etree.tostring(
-                    xmldoc,
-                    encoding='utf-8',
-                    xml_declaration=True,
-                    pretty_print=True
+                    xmldoc, encoding="utf-8", xml_declaration=True, pretty_print=True
                 )
             )

@@ -165,11 +156,7 @@ class Favs(object):

     @property
     def since(self):
-        d = os.path.join(
-            settings.paths.get('archive'),
-            'favorite',
-            "%s*" % self.silo
-        )
+        d = os.path.join(settings.paths.get("archive"), "favorite", "%s*" % self.silo)
         files = glob.glob(d)
         if len(files):
             mtime = max([int(os.path.getmtime(f)) for f in files])

@@ -182,6 +169,33 @@ class ImgFav(object):
     def __init__(self):
         return

+    def run(self):
+        if not self.exists:
+            self.fetch_images()
+            self.save_txt()
+
+    @property
+    def exists(self):
+        maybe = glob.glob("%s*" % self.targetprefix)
+        if len(maybe):
+            return True
+        return False
+
+    def save_txt(self):
+        attachments = [os.path.basename(fn) for fn in glob.glob("%s*" % self.targetprefix)
+            if not os.path.basename(fn).endswith('.md')]
+        meta = {
+            "title": self.title,
+            "favorite-of": self.url,
+            "date": str(self.published),
+            "sources": list(self.images.values()),
+            "attachments": attachments,
+            "author": self.author,
+        }
+        r = "---\n%s\n---\n\n" % (utfyamldump(meta))
+        with open("%s%s" % (self.targetprefix, MDFEXT), "wt") as fpath:
+            fpath.write(r)
+
     def fetch_images(self):
         for fpath, url in self.images.items():
             self.fetch_image(fpath, url)

@@ -190,7 +204,7 @@ class ImgFav(object):
         logging.info("pulling image %s to %s", url, fpath)
         r = requests.get(url, stream=True)
         if r.status_code == 200:
-            with open(fpath, 'wb') as f:
+            with open(fpath, "wb") as f:
                 r.raw.decode_content = True
                 shutil.copyfileobj(r.raw, f)

@@ -198,58 +212,60 @@ class ImgFav(object):
         if not imgtype:
             os.remove(fpath)
             return
-        if imgtype in ['jpg', 'jpeg', 'png']:
+        if imgtype in ["jpg", "jpeg", "png"]:
             self.write_exif(fpath)
         os.rename(fpath, fpath.replace(TMPFEXT, ".%s" % (imgtype)))

     def write_exif(self, fpath):
-        logging.info('populating EXIF data of %s' % fpath)
+        logging.info("populating EXIF data of %s" % fpath)

         geo_lat = False
         geo_lon = False

-        if hasattr(self, 'geo') and self.geo != None:
+        if hasattr(self, "geo") and self.geo != None:
             lat, lon = self.geo
-            if lat and lon and 'null' != lat and 'null' != lon:
+            if lat and lon and "null" != lat and "null" != lon:
                 geo_lat = lat
                 geo_lon = lon

         params = [
-            'exiftool',
-            '-overwrite_original',
-            '-XMP:Copyright=Copyright %s %s (%s)' % (
-                self.published.to('utc').format('YYYY'),
-                self.author.get('name'),
-                self.author.get('url'),
+            "exiftool",
+            "-overwrite_original",
+            "-XMP:Copyright=Copyright %s %s (%s)"
+            % (
+                self.published.to("utc").format("YYYY"),
+                self.author.get("name"),
+                self.author.get("url"),
             ),
-            '-XMP:Source=%s' % self.url,
-            '-XMP:ReleaseDate=%s' % self.published.to('utc').format('YYYY:MM:DD HH:mm:ss'),
-            '-XMP:Headline=%s' % self.title,
-            '-XMP:Description=%s' % self.content,
+            "-XMP:Source=%s" % self.url,
+            "-XMP:ReleaseDate=%s"
+            % self.published.to("utc").format("YYYY:MM:DD HH:mm:ss"),
+            "-XMP:Headline=%s" % self.title,
+            "-XMP:Description=%s" % self.content,
         ]

         for t in self.tags:
-            params.append('-XMP:HierarchicalSubject+=%s' % t)
-            params.append('-XMP:Subject+=%s' % t)
+            params.append("-XMP:HierarchicalSubject+=%s" % t)
+            params.append("-XMP:Subject+=%s" % t)

         if geo_lat and geo_lon:
             geo_lat = round(float(geo_lat), 6)
             geo_lon = round(float(geo_lon), 6)

             if geo_lat < 0:
-                GPSLatitudeRef = 'S'
+                GPSLatitudeRef = "S"
             else:
-                GPSLatitudeRef = 'N'
+                GPSLatitudeRef = "N"

             if geo_lon < 0:
-                GPSLongitudeRef = 'W'
+                GPSLongitudeRef = "W"
             else:
-                GPSLongitudeRef = 'E'
+                GPSLongitudeRef = "E"

-            params.append('-GPSLongitude=%s' % abs(geo_lon))
-            params.append('-GPSLatitude=%s' % abs(geo_lat))
-            params.append('-GPSLongitudeRef=%s' % GPSLongitudeRef)
-            params.append('-GPSLatitudeRef=%s' % GPSLatitudeRef)
+            params.append("-GPSLongitude=%s" % abs(geo_lon))
+            params.append("-GPSLatitude=%s" % abs(geo_lat))
+            params.append("-GPSLongitudeRef=%s" % GPSLongitudeRef)
+            params.append("-GPSLatitudeRef=%s" % GPSLatitudeRef)

         params.append(fpath)

@@ -261,6 +277,6 @@ class ImgFav(object):
         )

         stdout, stderr = p.communicate()
-        _original = '%s_original' % fpath
+        _original = "%s_original" % fpath
         if os.path.exists(_original):
             os.unlink(_original)
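The new save_txt is the "YAML files" item from the commit message: next to the downloaded images, every favorite now gets a <targetprefix>.md file whose front matter can later be reused as a site entry. With hypothetical values (and PyYAML's default alphabetical key order from utfyamldump), the written file would look roughly like:

    ---
    attachments:
    - tumblr_someblog_123456789_0.jpg
    author:
        name: someblog
        url: http://someblog.tumblr.com
    date: '2019-06-30T12:34:56+00:00'
    favorite-of: https://someblog.tumblr.com/post/123456789
    sources:
    - https://example.com/original.jpg
    title: some post
    ---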
run.py (6 changed lines)

@@ -7,8 +7,8 @@ import Flickr
 import Artstation
 from pprint import pprint

-lfm = LastFM.LastFM()
-lfm.run()
+# lfm = LastFM.LastFM()
+# lfm.run()

 opml = common.Follows()


@@ -16,7 +16,7 @@ silos = [
     DeviantArt.DAFavs(),
     Flickr.FlickrFavs(),
     Tumblr.TumblrFavs(),
-    Artstation.ASFavs()
+    Artstation.ASFavs(),
 ]

 for silo in silos:
settings.py (34 changed lines)

@@ -3,37 +3,27 @@ import re
 import argparse
 import logging

-base = os.path.abspath(os.path.expanduser('~/Projects/petermolnar.net'))
+base = os.path.abspath(os.path.expanduser("~/Projects/petermolnar.net"))

 opml = {
-    'owner': 'Peter Molnar',
-    'email': 'mail@petermolnar.net',
-    'title': 'feeds followed by petermolnar.net',
-    'xsl': 'https://petermolnar.net/following.xsl'
+    "owner": "Peter Molnar",
+    "email": "mail@petermolnar.net",
+    "title": "feeds followed by petermolnar.net",
+    "xsl": "https://petermolnar.net/following.xsl",
 }

 paths = {
-    'archive': os.path.join(base, 'archive'),
-    'content': os.path.join(base, 'content'),
+    "archive": os.path.join(base, "archive"),
+    "content": os.path.join(base, "content"),
 }

-loglevels = {
-    'critical': 50,
-    'error': 40,
-    'warning': 30,
-    'info': 20,
-    'debug': 10
-}
+loglevels = {"critical": 50, "error": 40, "warning": 30, "info": 20, "debug": 10}

-_parser = argparse.ArgumentParser(description='Parameters for silo.pasta')
-_parser.add_argument(
-    '--loglevel',
-    default='debug',
-    help='change loglevel'
-)
+_parser = argparse.ArgumentParser(description="Parameters for silo.pasta")
+_parser.add_argument("--loglevel", default="debug", help="change loglevel")

 args = vars(_parser.parse_args())
 logging.basicConfig(
-    level=loglevels[args.get('loglevel')],
-    format='%(asctime)s - %(levelname)s - %(message)s'
+    level=loglevels[args.get("loglevel")],
+    format="%(asctime)s - %(levelname)s - %(message)s",
 )
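These opml settings feed the OPML export in common.Follows; assembled, a fragment of the generated content/following.opml would look roughly like this (structure inferred from the common.py hunks above, feed data made up):

    <?xml version='1.0' encoding='utf-8'?>
    <?xml-stylesheet type="text/xsl" href="https://petermolnar.net/following.xsl"?>
    <opml>
        <head>
            <title>feeds followed by petermolnar.net</title>
            <dateCreated>Sun, 30 Jun 2019 12:34:56 UTC</dateCreated>
            <ownerName>Peter Molnar</ownerName>
            <ownerEmail>mail@petermolnar.net</ownerEmail>
        </head>
        <body>
            <outline text="Unknown">
                <outline type="rss" text="some feed" xmlUrl="https://example.com/feed/" htmlUrl="https://example.com/"/>
            </outline>
        </body>
    </opml>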