unify all handlers into one for clarity and proper text message handling
(first try)
parent ed81096071
commit 903ad07e3c
1 changed file with 39 additions and 51 deletions
 delojza.py | 90 +++++++++++++++++++++++++++++++++++++++---------------------------------------------------
--- a/delojza.py
+++ b/delojza.py
@@ -22,8 +22,7 @@ import pytumblr
 import requests
 import telegram
 import youtube_dl
-from telegram import MessageEntity
-from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
+from telegram.ext import Updater, CommandHandler, MessageHandler
 from youtube_dl.version import __version__ as YTDL_VERSION
 
 
@@ -58,13 +57,7 @@ class DelojzaBot:
         dp.add_handler(CommandHandler("start", self.tg_start))
         dp.add_error_handler(self.tg_error)
-        self.tg_url_handler = MessageHandler(Filters.entity(MessageEntity.URL), self.tg_handle_url)
-        dp.add_handler(self.tg_url_handler)
-        self.tg_rest_handler = MessageHandler(Filters.photo | Filters.video | Filters.video_note |
-                                              Filters.audio | Filters.voice | Filters.document, self.tg_handle_rest)
-        dp.add_handler(self.tg_rest_handler)
-        dp.add_handler(MessageHandler(Filters.entity(MessageEntity.HASHTAG), self.tg_handle_hashtag))
-        dp.add_handler(MessageHandler(Filters.text, self.tg_handle_text))
+        dp.add_handler(MessageHandler(None, self.tg_handle))
         dp.add_handler(CommandHandler("stats", self.tg_stats))
         dp.add_handler(CommandHandler("orphans", self.tg_orphan))
         dp.add_handler(CommandHandler("orphans_full", self.tg_orphan_full))
@@ -251,7 +244,7 @@ class DelojzaBot:
                 hashtags[i] = "PRAS"
         return hashtags
 
-    def get_hashtags(self, message):
+    def _get_hashtags(self, message):
         hashtags = self.extract_hashtags(message)
         if len(hashtags) == 0 and self.last_hashtags is not None:
             user, chat, ts, last_hashtags = self.last_hashtags
@@ -259,17 +252,6 @@ class DelojzaBot:
                 hashtags = last_hashtags
         return hashtags
 
-    def tg_handle_hashtag(self, bot, update):
-        hashtags = self.extract_hashtags(update.message)
-
-        self.handle_text(update.message.reply_to_message or update.message, hashtags)
-
-        if update.message.reply_to_message:
-            self.handle_tg_message(update.message.reply_to_message, bot, hashtags)
-            self.handle_urls(update.message.reply_to_message, hashtags)
-        else:
-            self.last_hashtags = update.message.from_user, update.message.chat, datetime.now(), hashtags
-
     def handle_text(self, message, hashtags):
         if len(hashtags) == 0 or hashtags[0] not in ('TEXT', 'TXT'):
             return
@@ -342,25 +324,6 @@ class DelojzaBot:
             if "Timed out" not in str(exc_value):
                 message.reply_text("Something is FUCKED: [{}] {}".format(exc_type, exc_value))
 
-    def tg_handle_url(self, _, update):
-        self.handle_urls(update.message, self.get_hashtags(update.message))
-
-    def handle_urls(self, message, hashtags):
-        urls = list(map(lambda e: message.parse_entity(e),
-                        filter(lambda e: e.type == 'url', message.entities)))
-        ytdl_urls = [url for url in urls if self.ytdl_can(url)]
-        normal_urls = [url for url in urls if not self.ytdl_can(url)]
-        if len(ytdl_urls) > 0:
-            self.handle(ytdl_urls, message, hashtags, self.download_ytdl)
-        if len(normal_urls) > 0:
-            file_urls = [url for url in normal_urls if
-                         "text" not in requests.head(url).headers.get("Content-Type", "text")]
-            if len(file_urls) > 0:
-                self.handle(file_urls, message, hashtags, self.download_raw)
-
-    def tg_handle_rest(self, bot, update):
-        self.handle_tg_message(update.message, bot, self.get_hashtags(update.message))
-
     def handle_tg_message(self, message, bot, hashtag):
         file, filetitle, tumblr = None, None, False
         if len(message.photo) > 0:
@@ -383,17 +346,43 @@ class DelojzaBot:
         url = bot.getFile(file).file_path
         self.handle([url], message, hashtag, self.download_raw, filetitle=filetitle)
 
-    def tg_handle_text(self, _, update):
-        if self.markov:
-            self.markov.add_to_corpus(update.message.text)
+    def handle_urls(self, message, hashtags):
+        urls = list(map(lambda e: message.parse_entity(e),
+                        filter(lambda e: e.type == 'url', message.entities)))
+        ytdl_urls = [url for url in urls if self.ytdl_can(url)]
+        normal_urls = [url for url in urls if not self.ytdl_can(url)]
+        if len(ytdl_urls) > 0:
+            self.handle(ytdl_urls, message, hashtags, self.download_ytdl)
+        if len(normal_urls) > 0:
+            file_urls = [url for url in normal_urls if
+                         "text" not in requests.head(url).headers.get("Content-Type", "text")]
+            if len(file_urls) > 0:
+                self.handle(file_urls, message, hashtags, self.download_raw)
 
-    def tag_dirs(self):
+    def tg_handle(self, bot, update):
+        self.handle_urls(update.message, self._get_hashtags(update.message))
+        self.handle_tg_message(update.message, bot, self._get_hashtags(update.message))
+
+        hashtags = self.extract_hashtags(update.message)
+        if len(hashtags) > 0:
+            self.handle_text(update.message.reply_to_message or update.message, hashtags)
+
+            if update.message.reply_to_message:
+                self.handle_tg_message(update.message.reply_to_message, bot, hashtags)
+                self.handle_urls(update.message.reply_to_message, hashtags)
+            else:
+                self.last_hashtags = update.message.from_user, update.message.chat, datetime.now(), hashtags
+        else:
+            if self.markov:
+                self.markov.add_to_corpus(update.message.text)
+
+    def _get_tag_dirs(self):
         return list(filter(lambda x: x.upper() == x,
                            filter(lambda directory: os.path.isdir(os.path.join(self.out_dir, directory)),
                                   os.listdir(self.out_dir))))
 
     def tg_stats(self, _, update):
-        tag_dirs = self.tag_dirs()
+        tag_dirs = self._get_tag_dirs()
         reply = "Total number of tags: {}\n\n".format(len(tag_dirs))
         counts = [(directory, os.listdir(os.path.join(self.out_dir, directory))) for directory in tag_dirs]
         counts.sort(key=itemgetter(0))
@@ -420,10 +409,9 @@ class DelojzaBot:
             reply += "\nFollowing tags are orphans: " + ", ".join(map(itemgetter(0), orphans))
         update.message.reply_text(reply, parse_mode=telegram.ParseMode.HTML)
 
-    def orphans(self):
+    def _get_orphan_tags(self):
         result = []
-        tag_dirs = self.tag_dirs()
-        for directory in tag_dirs:
+        for directory in self._get_tag_dirs():
             files = os.listdir(os.path.join(self.out_dir, directory))
             if len(files) == 1:
                 result.append((directory, files[0]))
@@ -432,7 +420,7 @@ class DelojzaBot:
         return sorted(result, key=itemgetter(0))
 
     def tg_orphan(self, _, update):
-        orphans = self.orphans()
+        orphans = self._get_orphan_tags()
         if len(orphans) == 0:
             update.message.reply_text("Good job, no orphan tags!")
         else:
@@ -440,7 +428,7 @@ class DelojzaBot:
                                       ", ".join(map(itemgetter(0), orphans)))
 
     def tg_orphan_full(self, _, update):
-        orphans = self.orphans()
+        orphans = self._get_orphan_tags()
         if len(orphans) == 0:
             update.message.reply_text("Good job, no orphan tags!")
         else:
@@ -497,7 +485,7 @@ class DelojzaBot:
             if update is not None:
                 default = "Mmmm, I like it..."
                 update.message.reply_text((self.markov.make_sentence(tries=100) if self.markov else default) or default)
-                self.tg_handle_rest(bot, update)
+                self.tg_handle(bot, update)
         else:
             if update is not None:
                 update.message.reply_text("Something is fucked: %s" % error)
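For context, the change above replaces the per-type handlers (URL, media, hashtag, text) with one catch-all MessageHandler that routes inside its callback. The following is a minimal sketch of that dispatch pattern, assuming python-telegram-bot's pre-v12 (bot, update) callback API; "TOKEN" and the branch bodies are placeholders, not delojza's actual logic:

# Minimal sketch of the catch-all dispatch pattern this commit adopts.
# Assumes python-telegram-bot pre-v12 with (bot, update) callbacks;
# "TOKEN" and the branch bodies are placeholders.
from telegram.ext import MessageHandler, Updater


def tg_handle(bot, update):
    message = update.message
    if message is None:
        return  # ignore updates without a message (edits, channel posts, ...)
    hashtags = [message.parse_entity(e) for e in message.entities if e.type == 'hashtag']
    urls = [message.parse_entity(e) for e in message.entities if e.type == 'url']
    if hashtags:
        pass  # tag the replied-to message, or remember the tags for the next upload
    elif urls or message.photo or message.video or message.audio or message.document:
        pass  # download and archive the linked or attached media
    elif message.text:
        pass  # plain text with no tags or URLs: feed the markov corpus


updater = Updater("TOKEN")
# A single handler with filters=None receives every message; all routing
# happens inside tg_handle instead of one MessageHandler per message type.
updater.dispatcher.add_handler(MessageHandler(None, tg_handle))
updater.start_polling()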