With this code I can print message IDs and file names into results.txt, and I have no flood problems. The only problem is that it doesn't limit the output to messages between 27-28 August 2023: it prints all of them.
import time
from pyrogram import Client
from datetime import datetime, timedelta

app = Client(
    name="@Peter_LongX",
    api_id=*******,
    api_hash="b**********************",
    phone_number="+3*****",
    password="" or None
)

group_id = -1001867911973
topic_id = 664
msg_file_dict = {}

start_date = datetime(2023, 8, 27)
end_date = datetime(2023, 8, 28)

async def main():
    async with app:
        processed_messages = 0  # Initialize the counter
        async for message in app.get_discussion_replies(chat_id=group_id, message_id=topic_id):
            print(f"Message ID: {message.id}")
            message_date = message.date
            if start_date <= message_date <= end_date:  # Check if the message date is within the specified range
                print(f"Message ID: {message.id}")
            file_name = None  # Declare the variable outside the if statement
            if message.video or (message.document and (message.document.mime_type.endswith("rar") or message.document.mime_type.endswith("zip") or message.document.mime_type.endswith("pdf") or message.document.mime_type.endswith("epub") or message.document.mime_type.endswith("cbr"))):
                file = message.video or message.document
                print("Video or rar/zip/pdf/epub/cbr file found")
                msg_id = message.id
                file_name = file.file_name or f"VID_{message.id}_{file.file_unique_id}.{file.mime_type.split('/')[-1]}"
                print(file_name)
                msg_file_dict[msg_id] = file_name
                print()

            # Appending results to the text file with utf-8 encoding
            with open("results.txt", "a", encoding="utf-8") as file:
                if file_name:
                    file.write(f"Message ID: {message.id}\n")
                    file.write(f"File Name: {file_name}\n")
                    file.write("\n")

            processed_messages += 1  # Increment the counter
            if processed_messages % 40 == 0:
                time.sleep(15)  # Pause for 15 seconds after every 40 messages

app.run(main())
So I thought I'd modify the code a bit so that it only prints the messages between 27-28 August. Indeed, this way I do get only the messages within those dates, but, unlike before, the terminal now gives me a FLOOD error. I don't understand why, since I didn't get this error before.
[previous code]
async def main():
    async with app:
        processed_messages = 0  # Initialize the counter
        async for message in app.get_discussion_replies(chat_id=group_id, message_id=topic_id):
            message_date = message.date
            if start_date <= message_date <= end_date:  # Check if the message date is within the specified range
                print(f"Message ID: {message.id}")
                file_name = None  # Initialize the variable
                if message.video or (message.document and (message.document.mime_type.endswith("rar") or message.document.mime_type.endswith("zip") or message.document.mime_type.endswith("pdf") or message.document.mime_type.endswith("epub") or message.document.mime_type.endswith("cbr"))):
                    file = message.video or message.document
                    print("Video or rar/zip/pdf/epub/cbr file found")
                    msg_id = message.id
                    file_name = file.file_name or f"VID_{message.id}_{file.file_unique_id}.{file.mime_type.split('/')[-1]}"
                    print(file_name)
                    msg_file_dict[msg_id] = file_name

                    # Appending results to the text file with utf-8 encoding
                    with open("results.txt", "a", encoding="utf-8") as file:
                        file.write(f"Message ID: {msg_id}\n")
                        file.write(f"File Name: {file_name}\n")
                        file.write("\n")

                processed_messages += 1  # Increment the counter
                if processed_messages % 40 == 0:
                    time.sleep(15)  # Pause for 15 seconds after every 40 messages

app.run(main())
The error in the second code is this:
raise getattr(
pyrogram.errors.exceptions.flood_420.FloodWait: Telegram says: [420 FLOOD_WAIT_X] - A wait of 15 seconds is required (caused by "messages.GetReplies")
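From what I understand of this error, Telegram is asking the client to wait before calling messages.GetReplies again. One thing I was thinking of trying is to catch the exception and wait for the requested time, reusing the app, group_id and topic_id defined above. This is just a sketch, not tested; I believe the wait in seconds is exposed as e.value on Pyrogram 2.x (older versions used e.x):

import asyncio
from pyrogram.errors import FloodWait

async def main():
    async with app:
        while True:
            try:
                async for message in app.get_discussion_replies(chat_id=group_id, message_id=topic_id):
                    ...  # same date filtering and results.txt writing as in the second code
                break  # iteration finished without a flood error
            except FloodWait as e:
                # wait as long as Telegram asks, then start the iteration again from the beginning
                await asyncio.sleep(e.value)

But I would prefer to understand why the flood happens at all rather than just retrying.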
Please look at the difference between the two behaviors: I don't understand why introducing the dates as a filter now gives me a flood error when it didn't before.
HERE you can see the difference between running the first code and the second code.
In my opinion, but I could be wrong, it may be necessary to add some code to manage the logic of the datetime intervals: if the simple listing gives me no flood problems but introducing the datetime filter does, then something in that filtering must be producing the flood error.
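For example, one idea I had (again just a sketch, not tested) is to count and pause on every message that get_discussion_replies returns, not only on the ones inside the date range, and to use asyncio.sleep instead of time.sleep so the client isn't blocked while waiting:

import asyncio

async def main():
    async with app:
        processed_messages = 0
        async for message in app.get_discussion_replies(chat_id=group_id, message_id=topic_id):
            processed_messages += 1  # count every fetched message, whether or not it matches the dates
            if start_date <= message.date <= end_date:
                ...  # same file-name extraction and results.txt writing as in the second code
            if processed_messages % 40 == 0:
                await asyncio.sleep(15)  # pause regardless of the date filter

Is something like this the right direction, or is the cause of the flood somewhere else?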