add auto tumblr, pt. 1

parent d95fa1e048, commit 45dfdac742
3 changed files with 29 additions and 8 deletions

delojza.py (29 changes):
@@ -9,6 +9,7 @@ from glob import glob
 import filetype
 import markovify
+import pytumblr
 import requests
 import youtube_dl
 from telegram import MessageEntity
@@ -22,6 +23,13 @@ logging.basicConfig(level=logging.INFO,
                     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 logger = logging.getLogger("kunsax")
 
+client = pytumblr.TumblrRestClient(
+    '***REMOVED***',
+    '***REMOVED***',
+    '***REMOVED***',
+    '***REMOVED***'
+)
+
 markov = None
 
 
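The four redacted positional arguments are the OAuth 1.0a credentials pytumblr expects: consumer key, consumer secret, token, and token secret. A minimal sketch of the client setup and of the queueing call this commit uses later, with placeholder strings standing in for the redacted literals:

    import pytumblr

    # Placeholders; the committed values are redacted as ***REMOVED*** above.
    client = pytumblr.TumblrRestClient(
        'consumer-key',
        'consumer-secret',
        'oauth-token',
        'oauth-token-secret'
    )

    # state="queue" adds the post to the blog's queue instead of publishing
    # it immediately; data= takes a local file path to upload.
    client.create_photo('kunsaxan', state="queue", data='/tmp/example.jpg')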
@@ -78,9 +86,11 @@ def download_ydl(urls, subdir, date, extract=False, filename=None):
     for globbed in globbeds:
         logger.info("Moving %s to %s..." % (globbed, out_dir))
         shutil.move(globbed, out_dir)
+    return []


 def download_raw(urls, subdir, date, extract=False, filename=None):
+    filenames = []
     for url in urls:
         local_filename = OUT_DIR + '/' + subdir + '/' + "%s__%s" % (datestr(date), filename or url.split('/')[-1])
         r = requests.get(url, stream=True)
@@ -95,6 +105,8 @@ def download_raw(urls, subdir, date, extract=False, filename=None):
         else:
             logger.info('Moving file without extension... %s?' % kind.extension)
             shutil.move(local_filename, local_filename + '.' + kind.extension)
+        filenames.append(local_filename)
+    return filenames


 last_hashtag = None
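Both downloaders now report what they saved: download_ydl returns an empty list for the moment (youtube-dl results are not queued anywhere yet, which fits the "pt. 1" in the commit message), while download_raw accumulates each saved path. One wrinkle: the new filenames.append(local_filename) runs after the file may have been renamed to local_filename + '.' + kind.extension, so the returned path can lack the extension the file actually has on disk. A self-contained sketch of the stream-download-and-return pattern, with a plain out_dir argument standing in for the OUT_DIR/subdir/date naming scheme:

    import shutil
    import filetype
    import requests

    def download_raw_sketch(urls, out_dir):
        filenames = []
        for url in urls:
            local_filename = out_dir + '/' + url.split('/')[-1]
            r = requests.get(url, stream=True)
            with open(local_filename, 'wb') as f:
                shutil.copyfileobj(r.raw, f)  # stream the body straight to disk
            kind = filetype.guess(local_filename)  # sniff the type from magic bytes
            if kind is not None:
                with_ext = local_filename + '.' + kind.extension
                shutil.move(local_filename, with_ext)
                local_filename = with_ext  # keep the returned path in sync with disk
            filenames.append(local_filename)
        return filenames  # the caller can now queue these files to Tumblr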
@@ -147,10 +159,10 @@ def handle(urls, message, download, filename=None):
         if hashtag == 'AUDIO' and download != download_raw:
             reply += ' (And also guessing you want to extract the audio)'
         message.reply_text(reply)
-        download(urls,
-                 hashtag or '.', message.date,
-                 extract=(hashtag == 'AUDIO'),
-                 filename=filename)
+        return download(urls,
+                        hashtag or '.', message.date,
+                        extract=(hashtag == 'AUDIO'),
+                        filename=filename)
     except:
         _, exc_value, __ = sys.exc_info()
         if "Timed out" not in str(exc_value):
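handle() used to fire the download and drop the result; it now returns whatever the downloader returns, so the caller can act on the list of saved files. Worth noting: when the except branch runs, the visible code falls through without a return, so handle() yields an implicit None rather than a list. A toy sketch of the propagation and of that failure mode, using stand-in stubs rather than the real functions:

    def download_stub(urls):
        # stand-in for download_raw: pretend each URL was saved under /tmp
        return ['/tmp/' + url.split('/')[-1] for url in urls]

    def handle_stub(urls, download):
        try:
            return download(urls)  # new behaviour: propagate the downloader's result
        except Exception:
            pass  # the real branch inspects sys.exc_info(); implicit None here

    saved = handle_stub(['http://example.com/a.jpg'], download_stub)
    for path in saved or []:  # guard against the None failure path
        print(path)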
@@ -168,10 +180,11 @@ def handle_url(bot, update):
 
 # noinspection PyBroadException
 def handle_rest(bot, update):
-    file, filename = None, None
+    file, filename, tumblr = None, None, False
     if len(update.message.photo) > 0:
         photo = max(update.message.photo, key=lambda p: p.width)
         file = photo.file_id
+        tumblr = True
     elif update.message.document is not None:
         filename = update.message.document.file_name
         file = update.message.document.file_id
@@ -187,7 +200,11 @@ def handle_rest(bot, update):
 
     if file is not None:
         url = bot.getFile(file).file_path
-        handle([url], update.message, download_raw, filename=filename)
+        filenames = handle([url], update.message, download_raw, filename=filename)
+        if tumblr:
+            update.message.reply_text('(btw, queueing to tumblr)')
+            for filename in filenames:
+                client.create_photo('kunsaxan', state="queue", data=filename)


 def handle_text(bot, update):
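The tumblr flag is only set on the photo branch above, so documents and other media pass through unchanged, and since download_ydl currently returns [], youtube-dl downloads queue nothing either. Each saved path is queued as a photo post on the 'kunsaxan' blog. (Minor smell: the loop reuses the local filename variable, shadowing the value passed to handle().) A sketch of the queueing step in isolation, assuming every path points at an image pytumblr can upload:

    def queue_photos(client, blog, paths):
        for path in paths or []:  # tolerate handle() returning None on errors
            response = client.create_photo(blog, state="queue", data=path)
            # pytumblr returns the parsed JSON response; logging it makes
            # failed uploads visible (an assumption about the payload shape).
            print(response)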
unpinned requirements:

@@ -2,4 +2,5 @@ python-telegram-bot
 youtube-dl
 requests
 filetype
-markovify
+markovify
+pytumblr
pinned requirements (pip-compile output):

@@ -10,11 +10,14 @@ cffi==1.11.5 # via cryptography
 chardet==3.0.4 # via requests
 cryptography==2.3.1 # via python-telegram-bot
 filetype==1.0.1
-future==0.16.0 # via python-telegram-bot
+future==0.16.0 # via python-telegram-bot, pytumblr
 idna==2.7 # via cryptography, requests
 markovify==0.7.1
+oauthlib==2.1.0 # via requests-oauthlib
 pycparser==2.19 # via cffi
 python-telegram-bot==11.1.0
+pytumblr==0.0.8
+requests-oauthlib==1.0.0 # via pytumblr
 requests==2.19.1
 six==1.11.0 # via cryptography
 unidecode==1.0.22 # via markovify