diff --git a/app/app.py b/app/app.py
index 80a70e7..5fdcad3 100755
--- a/app/app.py
+++ b/app/app.py
@@ -205,9 +205,8 @@ def get_saved():
         saved = ?
     ORDER BY
         {sort}
-    LIMIT ?
     """
-    binds = [True, config.posts_per_page_load]
+    binds = [True]
     posts = get_posts_from_select(cursor, select, binds)
     connection.close()
     return render_template(
@@ -349,16 +348,16 @@ def get_sidebar_links(cursor):
         FROM
             post
         WHERE
-            hidden = ?
+            hidden = ? AND
+            saved = ?
         GROUP BY
             subreddit
         ORDER BY
             count desc
     ) t
-    WHERE
-        count > ?
+    LIMIT ?
     """
-    binds = [False, config.other_posts_cutoff]
+    binds = [False, False, config.top_subreddits]
     results = cursor.execute(select, binds).fetchall()
     links = [f"/r/{sub[0]}" for sub in results]
     links.insert(0, "/r/all")
diff --git a/app/config.py b/app/config.py
index dc228a0..8628a8e 100644
--- a/app/config.py
+++ b/app/config.py
@@ -1,7 +1,7 @@
 # Scheduler configuration
 max_age_days = 30
 max_age_seconds = max_age_days * 24 * 60 * 60
-other_posts_cutoff = 4 #subreddits with this many unread posts or fewer are merged to /r/other
+top_subreddits = 5
 
 # Webpage configuration
 posts_per_page_load = 10
diff --git a/app/scrape_posts.py b/app/scrape_posts.py
index b8d30da..2a36f65 100755
--- a/app/scrape_posts.py
+++ b/app/scrape_posts.py
@@ -82,6 +82,7 @@ def download_media(cursor):
     binds = [False, False]
     results = cursor.execute(select, binds)
     post = results.fetchone()
+    downloads = {}
     binds = []
     while post is not None:
         post = json.loads(post[0])
@@ -89,7 +90,9 @@
         for url in post["media_urls"]:
             binds.append(post["permalink"])
             binds.append(url)
-            path = download_image(url, config.media_dir)
+            if url not in downloads:
+                downloads[url] = download_image(url, config.media_dir)
+            path = downloads[url]
             binds.append(path)
             print(f"Downloaded {path}")
         post = results.fetchone()