Make number of batches configurable in cleanup script
This commit is contained in:
parent 404f9f0928
commit d369d30b32

1 changed file with 8 additions and 8 deletions
cleanup.py | 16 ++++++++--------
@@ -6,7 +6,7 @@ from dotenv import load_dotenv
 from mastodon import Mastodon, MastodonRatelimitError
 from os import getenv
 
-def cleanup(offset: int = 200, limit: int = 40, id: int | None = None):
+def cleanup(offset: int = 200, num_batches: int = 10, limit: int = 40, start_id: int | None = None):
     """Delete old posts without any interactions.
 
     Args:
@@ -34,7 +34,7 @@ def cleanup(offset: int = 200, limit: int = 40, id: int | None = None):
         else:
             return mastodon.account_statuses(getenv("MASTODON_USER_ID"), exclude_replies=True, max_id=max_id, limit=limit)
 
-    if id is None:
+    if start_id is None:
         last_posts = []
         id_oldest_of_last_posts = -1
         iterations = offset // 40
@@ -55,8 +55,8 @@ def cleanup(offset: int = 200, limit: int = 40, id: int | None = None):
         id_oldest_of_last_posts = last_posts[-1]["id"]
         print(f"Currently at offset {iterations * 40 + remainder}, last ignored post: {last_posts[-1]['url']}")
     else:
-        id_oldest_of_last_posts = id
-        last_posts = mastodon.account_statuses(getenv("MASTODON_USER_ID"), exclude_replies=True, max_id=id, limit=limit)
+        id_oldest_of_last_posts = start_id
+        last_posts = mastodon.account_statuses(getenv("MASTODON_USER_ID"), exclude_replies=True, max_id=start_id, limit=limit)
         # return if there are no posts to check
         if len(last_posts) == 0:
             print("There are no older posts than the given id.")
@@ -64,12 +64,12 @@ def cleanup(offset: int = 200, limit: int = 40, id: int | None = None):
 
     # ----- delete old inactive posts ----- #
 
-    # default: check 100 batches of 40 posts
+    # default: check 10 batches of 40 posts
     # this usually tries until the rate limit is triggered or there are no more posts
     # if hitting the rate limit is undesired, reduce the number of iterations
-    for i in range(100):
+    for i in range(num_batches):
         # skip getting new batch in the first iteration if the id was set manually
-        if i != 0 or id is None:
+        if i != 0 or start_id is None:
             # find id of the currently oldest post
             last_post = last_posts[-1]
             if last_post["favourites_count"] != 0 or last_post["reblogs_count"] != 0 or last_post["replies_count"] != 0:
@@ -95,4 +95,4 @@ def cleanup(offset: int = 200, limit: int = 40, id: int | None = None):
                 print("Ignoring post:", submission["url"])
 
 if __name__ == "__main__":
-    cleanup(offset=200, limit=40, id=None)
+    cleanup(offset=200, num_batches=10, limit=40, start_id=None)
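With this change the cleanup loop runs num_batches batches of up to 40 posts instead of a hard-coded 100, and the start parameter is renamed from id to start_id. A minimal usage sketch of the new signature (the import path and the example status id are assumptions, not part of this commit):

    from cleanup import cleanup

    # check only 5 batches of 40 posts; fewer batches makes hitting the
    # Mastodon rate limit less likely
    cleanup(offset=200, num_batches=5, limit=40, start_id=None)

    # resume from a known status id instead of paging in from the newest post
    # (the id below is a placeholder, not a real status id)
    cleanup(num_batches=10, start_id=109000000000000000)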