artstation source added

This commit is contained in:
Peter Molnar 2018-12-27 19:50:38 +00:00
parent 32127ec451
commit f2cb57902c
6 changed files with 357 additions and 57 deletions

184
Artstation.py Normal file
View file

@ -0,0 +1,184 @@
import os
import glob
import json
import logging
import arrow
import requests
import keys
import common
import settings
from math import ceil
from pprint import pprint
class ASFavs(common.Favs):
    """Fetches and archives the Artstation likes of a configured user."""

    def __init__(self):
        super().__init__('artstation')
        # Artstation username, used in every API URL below
        self.user = keys.artstation.get('username')

    def paged_likes(self, page=1):
        """Fetch one page of the user's likes from the JSON API.

        :param page: 1-based page number (Artstation serves 50 items/page)
        :returns: parsed JSON dict, or None on network/parse failure or
                  when the response carries no 'data' key
        """
        url = "https://www.artstation.com/users/%s/likes.json?page=%s" % (
            self.user,
            page
        )
        try:
            js = requests.get(url).json()
        except Exception as e:
            logging.error('fetching artstation failed: %s', e)
            return None
        if 'data' not in js:
            return None
        return js

    @property
    def likes(self):
        """All likes of the user, collected across every available page."""
        js = self.paged_likes()
        if not js:
            return []
        likes = js.get('data', [])
        # the API reports the total count; pages hold 50 entries each
        pages = ceil(js.get('total_count', 1) / 50)
        # BUGFIX: the original re-fetched page 1 forever (no page argument,
        # and `continue` on failure never decremented the counter); request
        # each subsequent page exactly once and stop at the first failure
        for page in range(2, pages + 1):
            extras = self.paged_likes(page)
            if not extras:
                break
            likes = likes + extras.get('data', [])
        return likes

    @property
    def feeds(self):
        """OPML-style feed entries for every artist the user follows."""
        feeds = []
        js = requests.get(
            "https://www.artstation.com/users/%s/following.json" % self.user
        )
        try:
            js = js.json()
            if 'data' not in js:
                logging.error('fetching artstation follows failed: missing data')
                return feeds
            for f in js.get('data'):
                feeds.append({
                    'text': f.get('username'),
                    'xmlUrl': "https://www.artstation.com/%s.rss" % f.get('subdomain'),
                    'htmlUrl': "https://www.artstation.com/%s" % f.get('subdomain'),
                })
        except Exception as e:
            logging.error('parsing artstation follows failed: %s', e)
        return feeds

    def run(self):
        """Archive every like via ASLike."""
        for like in self.likes:
            like = ASLike(like)
            like.run()
class ASLike(common.ImgFav):
    """A single liked Artstation artwork: metadata accessors + image archiving."""

    def __init__(self, like):
        # raw like dict as returned by the likes.json endpoint
        self.like = like
        # cached project JSON; populated on first access of `data`
        # (the original re-downloaded it for every property access)
        self._data = None

    def __str__(self):
        return "like-of %s" % (self.url)

    @property
    def url(self):
        """Public permalink of the liked artwork."""
        return self.like.get('permalink')

    @property
    def data(self):
        """Full project JSON, fetched once and cached; None on failure."""
        if self._data is None:
            purl = "%s.json" % (self.url.replace('artwork', 'projects'))
            try:
                self._data = requests.get(purl).json()
            except Exception as e:
                logging.error(
                    'fetching artstation project %s failed: %s',
                    self.url,
                    e
                )
                return None
        return self._data

    @property
    def author(self):
        """Name and profile URL of the artwork's author."""
        return {
            'name': self.like.get('user').get('username'),
            'url': self.like.get('user').get('permalink'),
        }

    @property
    def id(self):
        """Numeric Artstation id of the like."""
        return self.like.get('id')

    @property
    def content(self):
        """HTML description of the project; empty string when unavailable."""
        data = self.data
        # guard: `data` is None when the project JSON could not be fetched
        if not data:
            return ''
        return '%s' % data.get('description_html', '')

    @property
    def title(self):
        """Best available title: title, then slug, then slugified URL."""
        # falsy check also covers None (the original crashed on len(None))
        title = self.like.get('title')
        if not title:
            title = self.like.get('slug')
        if not title:
            title = common.slugfname(self.url)
        return title

    @property
    def slug(self):
        """Slug of the like, falling back to a slugified URL."""
        maybe = self.like.get('slug')
        if not maybe:
            maybe = common.slugfname(self.url)
        return maybe

    @property
    def targetprefix(self):
        """Filesystem path prefix for the archived copies of this like."""
        return os.path.join(
            settings.paths.get('archive'),
            'favorite',
            "artstation_%s_%s_%s" % (
                common.slugfname('%s' % self.like.get('user').get('username')),
                self.like.get('hash_id'),
                self.slug
            )
        )

    @property
    def exists(self):
        """True when any file matching the target prefix is already archived."""
        return bool(glob.glob("%s*" % self.targetprefix))

    @property
    def published(self):
        """Publication time as an arrow object."""
        return arrow.get(self.like.get('published_at'))

    @property
    def tags(self):
        """Category names of the project; empty list when unavailable."""
        data = self.data
        if not data:
            return []
        return [c.get('name') for c in data.get('categories', [])]

    @property
    def images(self):
        """Map of target filename -> remote image URL for all image assets."""
        r = {}
        data = self.data
        if not data:
            return r
        cntr = 0
        for img in data.get('assets', []):
            # skip non-image assets (videos, embeds, ...)
            if img.get('asset_type') != 'image':
                logging.debug('skipping asset: %s' % img)
                continue
            f = "%s_%d%s" % (self.targetprefix, cntr, common.TMPFEXT)
            r.update({
                f: img.get('image_url')
            })
            cntr = cntr + 1
        return r

    def run(self):
        """Download the images unless they are already archived."""
        if not self.exists:
            self.fetch_images()
# script entry point: fetch and archive all Artstation likes
if __name__ == '__main__':
    t = ASFavs()
    t.run()

13
Pipfile
View file

@ -6,11 +6,14 @@ verify_ssl = true
[dev-packages] [dev-packages]
[packages] [packages]
deviantart = "==0.1.5" requests = "*"
arrow = "==0.12.1" arrow = "*"
requests = "==2.19.1" unicode-slugify = "*"
flickr_api = "==0.6.1" lxml = "*"
PyTumblr = "==0.0.8" bleach = "*"
deviantart = "*"
flickr-api = "*"
pytumblr = "*"
[requires] [requires]
python_version = "3.6" python_version = "3.6"

94
Pipfile.lock generated
View file

@ -1,7 +1,7 @@
{ {
"_meta": { "_meta": {
"hash": { "hash": {
"sha256": "df68630bf7b4a7d867f577a19e75d45abb741270e6c8155a99ff2a39aa80f755" "sha256": "8b6f21b4f6848f8c116032411e7508ea0392c799dcbaa14ddfbf68ee24e2cc59"
}, },
"pipfile-spec": 6, "pipfile-spec": 6,
"requires": { "requires": {
@ -23,6 +23,14 @@
"index": "pypi", "index": "pypi",
"version": "==0.12.1" "version": "==0.12.1"
}, },
"bleach": {
"hashes": [
"sha256:48d39675b80a75f6d1c3bdbffec791cf0bbbab665cf01e20da701c77de278718",
"sha256:73d26f018af5d5adcdabf5c1c974add4361a9c76af215fe32fdec8a6fc5fb9b9"
],
"index": "pypi",
"version": "==3.0.2"
},
"certifi": { "certifi": {
"hashes": [ "hashes": [
"sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
@ -65,10 +73,46 @@
}, },
"idna": { "idna": {
"hashes": [ "hashes": [
"sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
], ],
"version": "==2.7" "version": "==2.8"
},
"lxml": {
"hashes": [
"sha256:02bc220d61f46e9b9d5a53c361ef95e9f5e1d27171cd461dddb17677ae2289a5",
"sha256:22f253b542a342755f6cfc047fe4d3a296515cf9b542bc6e261af45a80b8caf6",
"sha256:2f31145c7ff665b330919bfa44aacd3a0211a76ca7e7b441039d2a0b0451e415",
"sha256:36720698c29e7a9626a0dc802ef8885f8f0239bfd1689628ecd459a061f2807f",
"sha256:438a1b0203545521f6616132bfe0f4bca86f8a401364008b30e2b26ec408ce85",
"sha256:4815892904c336bbaf73dafd54f45f69f4021c22b5bad7332176bbf4fb830568",
"sha256:5be031b0f15ad63910d8e5038b489d95a79929513b3634ad4babf77100602588",
"sha256:5c93ae37c3c588e829b037fdfbd64a6e40c901d3f93f7beed6d724c44829a3ad",
"sha256:60842230678674cdac4a1cf0f707ef12d75b9a4fc4a565add4f710b5fcf185d5",
"sha256:62939a8bb6758d1bf923aa1c13f0bcfa9bf5b2fc0f5fa917a6e25db5fe0cfa4e",
"sha256:75830c06a62fe7b8fe3bbb5f269f0b308f19f3949ac81cfd40062f47c1455faf",
"sha256:81992565b74332c7c1aff6a913a3e906771aa81c9d0c68c68113cffcae45bc53",
"sha256:8c892fb0ee52c594d9a7751c7d7356056a9682674b92cc1c4dc968ff0f30c52f",
"sha256:9d862e3cf4fc1f2837dedce9c42269c8c76d027e49820a548ac89fdcee1e361f",
"sha256:a623965c086a6e91bb703d4da62dabe59fe88888e82c4117d544e11fd74835d6",
"sha256:a7783ab7f6a508b0510490cef9f857b763d796ba7476d9703f89722928d1e113",
"sha256:aab09fbe8abfa3b9ce62aaf45aca2d28726b1b9ee44871dbe644050a2fff4940",
"sha256:abf181934ac3ef193832fb973fd7f6149b5c531903c2ec0f1220941d73eee601",
"sha256:ae07fa0c115733fce1e9da96a3ac3fa24801742ca17e917e0c79d63a01eeb843",
"sha256:b9c78242219f674ab645ec571c9a95d70f381319a23911941cd2358a8e0521cf",
"sha256:bccb267678b870d9782c3b44d0cefe3ba0e329f9af8c946d32bf3778e7a4f271",
"sha256:c4df4d27f4c93b2cef74579f00b1d3a31a929c7d8023f870c4b476f03a274db4",
"sha256:caf0e50b546bb60dfa99bb18dfa6748458a83131ecdceaf5c071d74907e7e78a",
"sha256:d3266bd3ac59ac4edcd5fa75165dee80b94a3e5c91049df5f7c057ccf097551c",
"sha256:db0d213987bcd4e6d41710fb4532b22315b0d8fb439ff901782234456556aed1",
"sha256:dbbd5cf7690a40a9f0a9325ab480d0fccf46d16b378eefc08e195d84299bfae1",
"sha256:e16e07a0ec3a75b5ee61f2b1003c35696738f937dc8148fbda9fe2147ccb6e61",
"sha256:e175a006725c7faadbe69e791877d09936c0ef2cf49d01b60a6c1efcb0e8be6f",
"sha256:edd9c13a97f6550f9da2236126bb51c092b3b1ce6187f2bd966533ad794bbb5e",
"sha256:fa39ea60d527fbdd94215b5e5552f1c6a912624521093f1384a491a8ad89ad8b"
],
"index": "pypi",
"version": "==4.2.5"
}, },
"oauth2": { "oauth2": {
"hashes": [ "hashes": [
@ -101,16 +145,17 @@
}, },
"requests": { "requests": {
"hashes": [ "hashes": [
"sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
], ],
"index": "pypi", "index": "pypi",
"version": "==2.19.1" "version": "==2.21.0"
}, },
"requests-oauthlib": { "requests-oauthlib": {
"hashes": [ "hashes": [
"sha256:8886bfec5ad7afb391ed5443b1f697c6f4ae98d0e5620839d8b4499c032ada3f", "sha256:8886bfec5ad7afb391ed5443b1f697c6f4ae98d0e5620839d8b4499c032ada3f",
"sha256:e21232e2465808c0e892e0e4dbb8c2faafec16ac6dc067dd546e9b466f3deac8" "sha256:e21232e2465808c0e892e0e4dbb8c2faafec16ac6dc067dd546e9b466f3deac8",
"sha256:fe3282f48fb134ee0035712159f5429215459407f6d5484013343031ff1a400d"
], ],
"version": "==1.0.0" "version": "==1.0.0"
}, },
@ -122,17 +167,38 @@
}, },
"six": { "six": {
"hashes": [ "hashes": [
"sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
], ],
"version": "==1.11.0" "version": "==1.12.0"
},
"unicode-slugify": {
"hashes": [
"sha256:34cf3afefa6480efe705a4fc0eaeeaf7f49754aec322ba3e8b2f27dc1cbcf650"
],
"index": "pypi",
"version": "==0.1.3"
},
"unidecode": {
"hashes": [
"sha256:092cdf7ad9d1052c50313426a625b717dab52f7ac58f859e09ea020953b1ad8f",
"sha256:8b85354be8fd0c0e10adbf0675f6dc2310e56fda43fa8fe049123b6c475e52fb"
],
"version": "==1.0.23"
}, },
"urllib3": { "urllib3": {
"hashes": [ "hashes": [
"sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
], ],
"version": "==1.23" "version": "==1.24.1"
},
"webencodings": {
"hashes": [
"sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
],
"version": "==0.5.1"
} }
}, },
"develop": {} "develop": {}

104
common.py
View file

@ -6,6 +6,7 @@ import logging
import shutil import shutil
import subprocess import subprocess
import json import json
from io import BytesIO
import lxml.etree as etree import lxml.etree as etree
from slugify import slugify from slugify import slugify
import requests import requests
@ -41,16 +42,15 @@ class cached_property(object):
setattr(inst, self.name, result) setattr(inst, self.name, result)
return result return result
class Follows(object): class Follows(dict):
def __init__(self): def __init__(self):
self.feeds = {}
self.auth = HTTPBasicAuth( self.auth = HTTPBasicAuth(
keys.miniflux.get('username'), keys.miniflux.get('username'),
keys.miniflux.get('token') keys.miniflux.get('token')
) )
@cached_property @property
def active_subscriptions(self): def subscriptions(self):
feeds = [] feeds = []
params = { params = {
'jsonrpc': '2.0', 'jsonrpc': '2.0',
@ -60,26 +60,29 @@ class Follows(object):
r = requests.post( r = requests.post(
keys.miniflux.get('url'), keys.miniflux.get('url'),
data=json.dumps(params), data=json.dumps(params),
auth=self.auth, auth=self.auth
) )
for feed in r.json().get('result', []): return r.json().get('result', [])
def sync(self):
current = []
for feed in self.subscriptions:
try: try:
feeds.append(feed['feed_url']) current.append(feed['feed_url'])
except Exception as e: except Exception as e:
logging.error('problem with feed entry: %s', feed) logging.error('problem with feed entry: %s', feed)
return feeds for silo, feeds in self.items():
for feed in feeds:
def syncminiflux(self): xmlurl = feed.get('xmlUrl')
for silo, feeds in self.feeds.items(): if len(xmlurl) and xmlurl not in current:
for f in feeds: logging.info('creating subscription for: %s', feed)
feed = f.get('xmlUrl')
if feed not in self.active_subscriptions:
params = { params = {
'jsonrpc': '2.0', 'jsonrpc': '2.0',
'method': 'createFeed', 'method': 'createFeed',
'id': keys.miniflux.get('id'), 'id': keys.miniflux.get('id'),
'params': { 'params': {
'url': feed, 'url': xmlurl,
'group_name': silo 'group_name': silo
} }
} }
@ -89,35 +92,68 @@ class Follows(object):
auth=self.auth, auth=self.auth,
) )
def append(self, silo, feeds):
self.feeds.update({silo: feeds})
def export(self): def export(self):
opml = etree.Element("opml") opml = etree.Element("opml", version="1.0")
xmldoc = etree.ElementTree(opml)
opml.addprevious(
etree.ProcessingInstruction(
"xml-stylesheet",
'type="text/xsl" href="%s"' % (settings.opml.get('xsl'))
)
)
head = etree.SubElement(opml, "head") head = etree.SubElement(opml, "head")
title = etree.SubElement(head, "title").text = "Social media RSS feeds" title = etree.SubElement(head, "title").text = settings.opml.get('title')
dt = etree.SubElement(head, "dateCreated").text = arrow.utcnow().format('ddd, DD MMM YYYY HH:mm:ss UTC')
owner = etree.SubElement(head, "ownerName").text = settings.opml.get('owner')
email = etree.SubElement(head, "ownerEmail").text = settings.opml.get('email')
body = etree.SubElement(opml, "body") body = etree.SubElement(opml, "body")
for silo, feeds in self.feeds.items(): groups = {}
s = etree.SubElement(body, "outline", text=silo) for feed in self.subscriptions:
for f in feeds: # contains sensitive data, skip it
entry = etree.SubElement( if 'sessionid' in feed.get('feed_url') or 'sessionid' in feed.get('site_url'):
s, continue
fgroup = feed.get('groups',None)
if not fgroup:
fgroup = [{
'title': 'Unknown',
'id': -1
}]
fgroup = fgroup.pop()
# some groups need to be skipped
if fgroup['title'].lower() in ['nsfw', '_self']:
continue
if fgroup['title'] not in groups.keys():
groups[fgroup['title']] = etree.SubElement(
body,
"outline", "outline",
type="rss", text=fgroup['title']
text=f.get('text'), )
xmlUrl=f.get('xmlUrl'), entry = etree.SubElement(
htmlUrl=f.get('htmlUrl') groups[fgroup['title']],
) "outline",
type="rss",
text=feed.get('title'),
xmlUrl=feed.get('feed_url'),
htmlUrl=feed.get('site_url')
)
opmlfile = os.path.join( opmlfile = os.path.join(
settings.paths.get('archive'), settings.paths.get('content'),
'feeds.opml' 'following.opml'
) )
with open(opmlfile, 'wb') as f: with open(opmlfile, 'wb') as f:
f.write(etree.tostring(opml, pretty_print=True)) f.write(
etree.tostring(
xmldoc,
encoding='utf-8',
xml_declaration=True,
pretty_print=True
)
)
class Favs(object): class Favs(object):
def __init__(self, silo): def __init__(self, silo):

9
run.py
View file

@ -4,6 +4,7 @@ import Tumblr
import LastFM import LastFM
import DeviantArt import DeviantArt
import Flickr import Flickr
import Artstation
from pprint import pprint from pprint import pprint
lfm = LastFM.LastFM() lfm = LastFM.LastFM()
@ -14,12 +15,14 @@ opml = common.Follows()
silos = [ silos = [
DeviantArt.DAFavs(), DeviantArt.DAFavs(),
Flickr.FlickrFavs(), Flickr.FlickrFavs(),
Tumblr.TumblrFavs() Tumblr.TumblrFavs(),
Artstation.ASFavs()
] ]
for silo in silos: for silo in silos:
silo.run() silo.run()
opml.append(silo.silo, silo.feeds) opml.update({silo.silo: silo.feeds})
opml.syncminiflux()
opml.sync()
opml.export() opml.export()

View file

@ -5,8 +5,16 @@ import logging
base = os.path.abspath(os.path.expanduser('~/Projects/petermolnar.net')) base = os.path.abspath(os.path.expanduser('~/Projects/petermolnar.net'))
opml = {
'owner': 'Peter Molnar',
'email': 'mail@petermolnar.net',
'title': 'feeds followed by petermolnar.net',
'xsl': 'https://petermolnar.net/following.xsl'
}
paths = { paths = {
'archive': os.path.join(base, 'archive'), 'archive': os.path.join(base, 'archive'),
'content': os.path.join(base, 'content'),
} }
loglevels = { loglevels = {
@ -20,7 +28,7 @@ loglevels = {
_parser = argparse.ArgumentParser(description='Parameters for silo.pasta') _parser = argparse.ArgumentParser(description='Parameters for silo.pasta')
_parser.add_argument( _parser.add_argument(
'--loglevel', '--loglevel',
default='debug', default='info',
help='change loglevel' help='change loglevel'
) )