import re
import asyncio
import logging
import requests
import traceback
import urllib.parse
import bitlyshortener
from aioify import aioify
from typing import Union
from bs4 import BeautifulSoup
from multiprocessing import Process
from pyrogram import Client, errors, filters, types, idle, handlers, enums
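
# Third-party packages used below: pyrogram, requests, beautifulsoup4 (with the
# "lxml" parser), aioify and bitlyshortener.

# Console logging for the whole script.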
logging.basicConfig(
format='%(name)s - %(levelname)s - %(message)s',
handlers=[logging.StreamHandler()],
level=logging.INFO
)
logger = logging.getLogger(__name__)
logger.info("Starting Bot ...")
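
# User-bot client used only for posting deal messages into the target chats.
# no_updates=True because this session never needs to receive updates.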
app = Client(
name="pryogram",
no_updates=True,
in_memory=True,
api_id=2879766,
api_hash="089d771e15ecee0bd0cbe58e2e218423",
session_string="BQCLSbOG3HzIFOXYblfpjhkqkAC5G4uRz3Hs91d7y6Xb6qm3Lmm22mAFw2CJ7bXbcxvUb36dsMcOvRYvhHMExhbbhUXWwRkjBwJViI77Y5niQQHYJ5SOt0s4uCDW4qa5IUlfdrDlE6KAFDiQkt79258_olcdQBZwmsHGliY6c1qqc1pueueIbY-hBarmBC3Ggjg4W-TrGK32fMSC7N37nIJOvUvwVG-NLyIS5qy29GfRrxMV39b1ujcLYdk8WZy7SUeWSutQvfhD2GIUtq_hxA65eOHJUCA8vr9YIfkQ1Ru825L5Qz_1J06dob1tUlpM90aTgGl4KBBtvjMNzk6Ev-qzHbU4sgA"
)
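
# Bot client that listens for the /start, /stop and /auth control commands.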
app0_ = Client(
name="updates_bot",
in_memory=True,
api_id=7188176,
api_hash="00354ec58538f2518bfcb45537a182e2",
bot_token="5608478008:AAHa6h1-KuDl-5GzT9MQ9xfh93ol7-zBiQg"
)
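
# In-memory state: the run/pause flag and the Telegram user IDs allowed to
# control the bot.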
LOCAL_DB = {
"isRunning": True,
"authUserIDs": [498415794, 1445283714]
}
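
# Send one message to a single chat, retrying after the wait time Telegram
# asks for whenever a FloodWait is raised.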
async def __send_msg(_text: str, chat_id: Union[str, int]):
    try:
        async with app:
            await app.send_message(
                chat_id=chat_id,
                text="✅ " + _text,
                entities=[types.MessageEntity(
                    type=enums.MessageEntityType.CUSTOM_EMOJI,
                    custom_emoji_id=4983406439939703316,
                    offset=0,
                    length=1
                )],
                disable_web_page_preview=True
            )
        logger.info("Sent Message ...")
    except errors.FloodWait as ex:
        logger.info(f"Sleeping for {ex.value}s ...")
        await asyncio.sleep(ex.value)
        await __send_msg(_text, chat_id)
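
# Broadcast a deal to every configured chat.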
async def send_message(_text: str):
    chat_ids = [-1001388213936, -1001673930867]
    for chat_id in chat_ids:
        await __send_msg(_text, chat_id)
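
# Shorten a URL with Bitly; on any error the original URL is returned unchanged.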
def short_url(url: str):
    try:
        shortener = bitlyshortener.Shortener(
            tokens=[
                "75fb7fd2b8e95792ce301afed3df25560053d3b9",
                "5f70de359cc334c43f6031f0a4efb80c9fe48998",
                "9b5242a5e0d6f182e23a6feeb38052ae1ac10564",
                "ed5cb3592ad31964c06ea117a8bae63a99393921",
            ],
            max_cache_size=256
        )
        new_url = shortener.shorten_urls([url])[0]
    except Exception:
        # Fall back to the original URL if shortening fails.
        new_url = url
    return new_url
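
# Scrape a single desidime.com deal page: pull the title, the offer price and
# the outgoing store link, rewrite the link with affiliate parameters, and
# return the message text to post.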
def scrape(_url: str):
    text_ = ""
    res_ = requests.get("https://www.desidime.com/" + _url)
    soup_ = BeautifulSoup(res_.content, "lxml")
    title = soup_.find("h1", attrs={"class": "f20 mb20 bold"}).text.strip()
    offer_price = soup_.find("div", attrs={"class": "dealprice"}).text.strip()
    link = soup_.find("div", attrs={"class": "mainpost usercomment"}).find("a").get("href").rsplit("url=", 1)[-1]
    link = urllib.parse.unquote(link)
    if "flipkart.com" in link:
        link = "https://www.paisawapas.com/rl/1670229?slug=flipkartearn&url=" + urllib.parse.quote(link)
    elif "myntra.com" in link:
        link = "https://www.paisawapas.com/rl/1670229?slug=ep-myntra&url=" + urllib.parse.quote(link)
    elif "ajio.com" in link:
        link = "https://www.paisawapas.com/rl/1670229?slug=ep-ajio&url=" + urllib.parse.quote(link)
    elif "amazon.in" in link:
        if "tag=" in link:
            # Swap any existing affiliate tag for ours, leaving the rest of the
            # query string untouched.
            link = re.sub(r"tag=[^&]*", "tag=collab-earticledeal-21&ref=as_li_ss_tl", link)
        else:
            if "/s?" in link:
                link += "&tag=collab-earticledeal-21&ref=as_li_ss_tl"
            elif "?" in link:
                link = link.split("?", 1)[0]
                link += "?tag=collab-earticledeal-21&ref=as_li_ss_tl"
            else:
                link += "?tag=collab-earticledeal-21&ref=as_li_ss_tl"
    text_ += f"{title} @ {offer_price}\n\n{short_url(link)}"
    return text_
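
# Main scraping loop: every 30 seconds fetch the newest text-view deal from
# desidime.com, skip it if it was already posted or is not from a supported
# store, otherwise scrape it and broadcast the result.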
async def looped_run():
    last_post_title = ""
    while True:
        if not LOCAL_DB.get("isRunning"):
            await asyncio.sleep(1)
            continue
        logger.info("Sleeping for 30s ...")
        await asyncio.sleep(30)
        try:
            res = await aioify(obj=requests.get)("https://www.desidime.com/new?deals_view=deal_text_view")
            soup = BeautifulSoup(res.content, "lxml")
            block = soup.find("div", attrs={"class": "deal-text-item"})
            deal_type = block.find("div", attrs={"class": "f16 cdblue"}).text.strip()
            if deal_type not in ["Amazon", "Flipkart", "Myntra", "ajio"]:
                continue
            r_title = block.find("div", attrs={"class": "deal-text-title"}).text.strip()
            if r_title == last_post_title:
                continue
            if not last_post_title:
                # First iteration: remember the newest post but do not repost it.
                last_post_title = r_title
                continue
            last_post_title = r_title
            logger.info("Got New Post ...")
            url = block.find("div", attrs={"class": "deal-text-title"}).find("a").get("href")
            # print("URL:- ", url)
            logger.info("Processing ...")
            text = await aioify(obj=scrape)(url)
            if not text:
                logger.info("Skipped ...")
                continue
            await send_message(text)
            logger.info("Sent Post in Chat ...")
            logger.info(f"{res}")
        except Exception as e:
            if str(e).strip() == "'NoneType' object has no attribute 'text'":
                logger.info("Not an offer Post ...")
                logger.info(f"{e}")
                continue
            logger.info(f"{e}")
            traceback.print_exc()
            continue
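
# Control bot: registers the /start, /stop and /auth command handlers on the
# bot client and keeps it running until the process is stopped.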
async def updates_run():
    logger.info("Importing Handlers ...")

    async def start_cmd(_, m: "types.Message"):
        if m.from_user.id not in LOCAL_DB["authUserIDs"]:
            return
        LOCAL_DB["isRunning"] = True
        return await m.reply_text(
            text="Hi, I'm Running ...", quote=True, disable_web_page_preview=True
        )

    async def stop_cmd(_, m: "types.Message"):
        if m.from_user.id not in LOCAL_DB["authUserIDs"]:
            return
        LOCAL_DB["isRunning"] = False
        return await m.reply_text(
            text="Hi, I'm NOT Running ...", quote=True, disable_web_page_preview=True
        )

    async def auth_cmd(_, m: "types.Message"):
        LOCAL_DB["authUserIDs"].append(int(m.text.split(" ", 1)[-1]))
        return await m.reply_text("Added in Auth ...", quote=True)

    start_cmd_handler = handlers.MessageHandler(
        start_cmd, filters=filters.command("start")
    )
    app0_.add_handler(start_cmd_handler)
    stop_cmd_handler = handlers.MessageHandler(
        stop_cmd, filters=filters.command("stop")
    )
    app0_.add_handler(stop_cmd_handler)
    auth_cmd_handler = handlers.MessageHandler(
        auth_cmd, filters=filters.command("auth") & filters.user([498415794, 1445283714])
    )
    app0_.add_handler(auth_cmd_handler)
    logger.info("Imported Successfully ...")
    await app0_.start()
    await idle()
    await app0_.stop()
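
# Process entry points: each one runs its coroutine on its own event loop.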
def thread_with_looped_run():
    try:
        logger.info("Looped Bot Started ...")
        asyncio.run(looped_run())
        logger.info("Looped Bot Stopped ...")
    except Exception as err:
        logger.info("Looped Bot Stopped ...")
        logger.info(err)

def thread_with_updates_run():
    try:
        logger.info("Updates Bot Started ...")
        asyncio.run(updates_run())
        logger.info("Updates Bot Stopped ...")
    except Exception as err:
        logger.info("Updates Bot Stopped ...")
        logger.info(err)
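
# Each entry point runs in its own process so the scraper loop and the control
# bot do not block each other. Note that LOCAL_DB is duplicated per process,
# so /start and /stop update only the control bot's copy of the flag.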
if __name__ == '__main__':
    try:
        thread1 = Process(target=thread_with_looped_run)
        thread1.start()
        thread2 = Process(target=thread_with_updates_run)
        thread2.start()
        thread1.join()
        thread2.join()
    except:
        logger.info("Exiting ...")
        exit()