Show all saved posts, show only the top 5 subreddits in the subreddit listing, and don't download duplicate files within a single fetch attempt

John Stephani 2026-01-14 00:00:32 -06:00
parent 780d74fe89
commit d9944a88a1
3 changed files with 10 additions and 8 deletions

@@ -205,9 +205,8 @@ def get_saved():
             saved = ?
         ORDER BY
             {sort}
-        LIMIT ?
     """
-    binds = [True, config.posts_per_page_load]
+    binds = [True]
     posts = get_posts_from_select(cursor, select, binds)
     connection.close()
     return render_template(
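
With the LIMIT clause gone, the saved-posts view loads every saved post instead of one page of config.posts_per_page_load. Note that the clause and its bind are dropped together: sqlite3 raises ProgrammingError when the number of ? placeholders and the number of binds disagree. A minimal sketch of that pairing (the in-memory table stands in for the app's schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE post (id INTEGER, saved BOOLEAN)")
conn.executemany("INSERT INTO post VALUES (?, ?)", [(1, True), (2, True), (3, False)])

# One placeholder, one bind: every saved post comes back, no page-size cap.
rows = conn.execute("SELECT id FROM post WHERE saved = ? ORDER BY id", [True]).fetchall()
print(rows)  # [(1,), (2,)]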
@@ -349,16 +348,16 @@ def get_sidebar_links(cursor):
             FROM
                 post
             WHERE
-                hidden = ?
+                hidden = ? AND
+                saved = ?
             GROUP BY
                 subreddit
             ORDER BY
                 count desc
         ) t
-        WHERE
-            count > ?
+        LIMIT ?
     """
-    binds = [False, config.other_posts_cutoff]
+    binds = [False, False, config.top_subreddits]
     results = cursor.execute(select, binds).fetchall()
     links = [f"/r/{sub[0]}" for sub in results]
     links.insert(0, "/r/all")
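
For illustration, the reshaped sidebar query in one piece (both SELECT lists sit above the hunk, so they are assumed here from the count alias the ORDER BY uses):

select = """
    SELECT subreddit, count FROM (
        SELECT subreddit, COUNT(*) AS count
        FROM post
        WHERE hidden = ? AND saved = ?
        GROUP BY subreddit
        ORDER BY count desc
    ) t
    LIMIT ?
"""
binds = [False, False, config.top_subreddits]  # unhidden, unsaved; keep the top N

Where the old query kept every subreddit with more than other_posts_cutoff visible posts, the new one keeps a fixed config.top_subreddits, so the sidebar stays the same length no matter how busy the feed is.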

@@ -1,7 +1,7 @@
 # Scheduler configuration
 max_age_days = 30
 max_age_seconds = max_age_days * 24 * 60 * 60
-other_posts_cutoff = 4  # subreddits with this many unread posts or fewer are merged to /r/other
+top_subreddits = 5
 
 # Webpage configuration
 posts_per_page_load = 10

@@ -82,6 +82,7 @@ def download_media(cursor):
     binds = [False, False]
     results = cursor.execute(select, binds)
     post = results.fetchone()
+    downloads = {}
     binds = []
     while post is not None:
         post = json.loads(post[0])
@@ -89,7 +90,9 @@ def download_media(cursor):
         for url in post["media_urls"]:
             binds.append(post["permalink"])
             binds.append(url)
-            path = download_image(url, config.media_dir)
+            if url not in downloads:
+                downloads[url] = download_image(url, config.media_dir)
+            path = downloads[url]
             binds.append(path)
             print(f"Downloaded {path}")
         post = results.fetchone()
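
The downloads dict is created once per download_media pass, so a URL that appears in several posts, or several times in one post, is fetched only once per run; later occurrences reuse the cached path. A self-contained sketch of the pattern with a stubbed downloader (the example URLs are made up):

downloads = {}  # url -> saved path, scoped to a single fetch attempt

def download_image(url, media_dir):
    # Stub for illustration; the real function saves the file and returns its path.
    return f"{media_dir}/{url.rsplit('/', 1)[-1]}"

for url in ["https://i.example/a.jpg", "https://i.example/a.jpg", "https://i.example/b.png"]:
    if url not in downloads:
        downloads[url] = download_image(url, "media")
    print(f"Downloaded {downloads[url]}")
# a.jpg is downloaded once; its second occurrence reuses the cached path.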