Show all saved posts, only show top 5 subreddits in subreddit listing, don't download duplicate files on any single fetch attempt
parent 780d74fe89
commit d9944a88a1
app/app.py (11 changed lines)

@@ -205,9 +205,8 @@ def get_saved():
             saved = ?
         ORDER BY
             {sort}
-        LIMIT ?
     """
-    binds = [True, config.posts_per_page_load]
+    binds = [True]
     posts = get_posts_from_select(cursor, select, binds)
     connection.close()
     return render_template(
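For context, the saved-posts query plausibly ends up like the sketch below once the LIMIT is gone. Only the lines visible in the hunk and the binds come from the commit; the column list, table name, default sort and the helper signature are assumptions.

def get_saved_posts(cursor, get_posts_from_select, sort="created desc"):
    # Sketch only: SELECT list and table name are assumed, {sort} is interpolated
    # exactly as in the hunk above.
    select = f"""
        SELECT *
        FROM post
        WHERE
            saved = ?
        ORDER BY
            {sort}
    """
    binds = [True]  # saved = ?; with no LIMIT bind, every saved post is returned
    return get_posts_from_select(cursor, select, binds)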
@@ -349,16 +348,16 @@ def get_sidebar_links(cursor):
         FROM
             post
         WHERE
-            hidden = ?
+            hidden = ? AND
+            saved = ?
         GROUP BY
             subreddit
         ORDER BY
             count desc
         ) t
-    WHERE
-        count > ?
+    LIMIT ?
     """
-    binds = [False, config.other_posts_cutoff]
+    binds = [False, False, config.top_subreddits]
     results = cursor.execute(select, binds).fetchall()
     links = [f"/r/{sub[0]}" for sub in results]
     links.insert(0, "/r/all")
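Putting that hunk back together, the sidebar query plausibly reads as below. The WHERE/LIMIT lines and the bind order come straight from the diff; the SELECT lists and the wrapper function are assumptions.

def get_top_subreddit_links(cursor, top_subreddits=5):
    # Sketch only: outer and inner SELECT lists are assumed.
    select = """
        SELECT subreddit, count
        FROM (
            SELECT subreddit, count(*) AS count
            FROM
                post
            WHERE
                hidden = ? AND
                saved = ?
            GROUP BY
                subreddit
            ORDER BY
                count desc
            ) t
        LIMIT ?
    """
    binds = [False, False, top_subreddits]  # hidden, saved, row cap: one per ?
    results = cursor.execute(select, binds).fetchall()
    links = [f"/r/{sub[0]}" for sub in results]
    links.insert(0, "/r/all")
    return links

So hidden and saved posts are excluded from the per-subreddit counts, only the config.top_subreddits busiest subreddits get sidebar links, and /r/all is still prepended.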
@@ -1,7 +1,7 @@
 # Scheduler configuration
 max_age_days = 30
 max_age_seconds = max_age_days * 24 * 60 * 60
-other_posts_cutoff = 4 #subreddits with this many unread posts or fewer are merged to /r/other
+top_subreddits = 5
 
 # Webpage configuration
 posts_per_page_load = 10
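For reference, the derived scheduler value above works out as follows; the comment on top_subreddits just summarizes the app.py hunk earlier in this commit.

# Plain arithmetic, mirroring the config lines above.
max_age_days = 30
max_age_seconds = max_age_days * 24 * 60 * 60
assert max_age_seconds == 2_592_000  # 30 days in seconds

# top_subreddits = 5 is bound to the sidebar query's LIMIT ?, replacing the old
# other_posts_cutoff = 4 threshold that filtered low-traffic subreddits out of
# the sidebar listing.
top_subreddits = 5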
@@ -82,6 +82,7 @@ def download_media(cursor):
     binds = [False, False]
     results = cursor.execute(select, binds)
     post = results.fetchone()
+    downloads = {}
     binds = []
     while post is not None:
         post = json.loads(post[0])
@@ -89,7 +90,9 @@ def download_media(cursor):
         for url in post["media_urls"]:
             binds.append(post["permalink"])
             binds.append(url)
-            path = download_image(url, config.media_dir)
+            if url not in downloads:
+                downloads[url] = download_image(url, config.media_dir)
+            path = downloads[url]
             binds.append(path)
             print(f"Downloaded {path}")
         post = results.fetchone()
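The download change boils down to a cache keyed by URL that lives for a single fetch attempt. A minimal standalone sketch of the idea; download_image, the post shape and the bind order are taken from the diff, while the wrapper function and its parameters are assumed.

def download_all(posts, download_image, media_dir):
    """Collect (permalink, url, path) binds, downloading each URL at most once."""
    downloads = {}  # url -> local path, rebuilt on every fetch attempt
    binds = []
    for post in posts:
        for url in post["media_urls"]:
            binds.append(post["permalink"])
            binds.append(url)
            if url not in downloads:
                # First time this URL shows up in this run: actually download it.
                downloads[url] = download_image(url, media_dir)
            path = downloads[url]  # repeats reuse the already-downloaded path
            binds.append(path)
            print(f"Downloaded {path}")
    return binds

Because the downloads dict is created inside download_media on every call, the deduplication only applies within one run, which matches the "any single fetch attempt" wording in the commit message.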