-rw-r--r--  youtube/subscriptions.py  |  77
1 file changed, 53 insertions(+), 24 deletions(-)
diff --git a/youtube/subscriptions.py b/youtube/subscriptions.py
index b5f3c83..04d3c5a 100644
--- a/youtube/subscriptions.py
+++ b/youtube/subscriptions.py
@@ -108,8 +108,7 @@ def _subscribe(channels):
with connection as cursor:
channel_ids_to_check = [channel[0] for channel in channels if not _is_subscribed(cursor, channel[0])]
- rows = ((channel_id, channel_name, 0, 0) for channel_id,
- channel_name in channels)
+ rows = ((channel_id, channel_name, 0, 0) for channel_id, channel_name in channels)
cursor.executemany('''INSERT OR IGNORE INTO subscribed_channels (yt_channel_id, channel_name, time_last_checked, next_check_time)
VALUES (?, ?, ?, ?)''', rows)
@@ -236,8 +235,7 @@ def _get_channel_names(cursor, channel_ids):
return result
-def _channels_with_tag(cursor, tag, order=False, exclude_muted=False,
- include_muted_status=False):
+def _channels_with_tag(cursor, tag, order=False, exclude_muted=False, include_muted_status=False):
''' returns list of (channel_id, channel_name) '''
statement = '''SELECT yt_channel_id, channel_name'''
@@ -434,8 +432,10 @@ def autocheck_setting_changed(old_value, new_value):
stop_autocheck_system()
-settings.add_setting_changed_hook('autocheck_subscriptions',
- autocheck_setting_changed)
+settings.add_setting_changed_hook(
+ 'autocheck_subscriptions',
+ autocheck_setting_changed
+)
if settings.autocheck_subscriptions:
start_autocheck_system()
# ----------------------------
@@ -574,7 +574,6 @@ def _get_upstream_videos(channel_id):
video_item['time_published'] = youtube_timestamp_to_posix(video_item['time_published']) - i # subtract a few seconds off the videos so they will be in the right order
except Exception:
print(video_item)
-
else:
video_item['is_time_published_exact'] = False
video_item['time_published'] = None
@@ -601,7 +600,6 @@ def _get_upstream_videos(channel_id):
# 1 month between videos
videos[i]['time_published'] = now - i*3600*24*30
- video_item['channel_id'] = channel_id
if len(videos) == 0:
average_upload_period = 4*7*24*3600 # assume 1 month for channel with no videos
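For reference, the two "one month" constants in this hunk are close but not identical approximations:

# spacing applied between videos with unknown publish times (~30 days)
assert 3600 * 24 * 30 == 2_592_000
# fallback average upload period for a channel with no videos (exactly 4 weeks)
assert 4 * 7 * 24 * 3600 == 2_419_200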
@@ -620,26 +618,31 @@ def _get_upstream_videos(channel_id):
with open_database() as connection:
with connection as cursor:
- # calculate how many new videos there are
- existing_vids = set(row[0] for row in cursor.execute(
- '''SELECT video_id
+ # Get video ids and duration of existing vids so we
+ # can see how many new ones there are and update
+            # livestreams/premieres
+ existing_vids = list(cursor.execute(
+ '''SELECT video_id, duration
FROM videos
INNER JOIN subscribed_channels
ON videos.sql_channel_id = subscribed_channels.id
WHERE yt_channel_id=?
ORDER BY time_published DESC
LIMIT 30''', [channel_id]).fetchall())
+ existing_vid_ids = set(row[0] for row in existing_vids)
+ existing_durs = dict(existing_vids)
# new videos the channel has uploaded since last time we checked
number_of_new_videos = 0
for video in videos:
- if video['id'] in existing_vids:
+ if video['id'] in existing_vid_ids:
break
number_of_new_videos += 1
is_first_check = cursor.execute('''SELECT time_last_checked FROM subscribed_channels WHERE yt_channel_id=?''', [channel_id]).fetchone()[0] in (None, 0)
time_videos_retrieved = int(time.time())
rows = []
+ update_rows = []
for i, video_item in enumerate(videos):
if (is_first_check
or number_of_new_videos > 6
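Read outside the diff context, the new-video count in this hunk relies on the fetched feed being ordered newest-first: iteration stops at the first ID that is already stored. A minimal sketch with hypothetical video IDs and durations:

# rows as returned by the SELECT above: (video_id, duration), newest first
existing_vids = [('abc123', '10:31'), ('def456', 'upcoming')]
existing_vid_ids = set(row[0] for row in existing_vids)   # {'abc123', 'def456'}
existing_durs = dict(existing_vids)                       # video_id -> duration string

number_of_new_videos = 0
for video in [{'id': 'new001'}, {'id': 'abc123'}, {'id': 'def456'}]:
    if video['id'] in existing_vid_ids:
        break
    number_of_new_videos += 1
assert number_of_new_videos == 1
assert existing_durs['def456'] == 'upcoming'   # this one will need its duration updated later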
@@ -655,16 +658,34 @@ def _get_upstream_videos(channel_id):
time_noticed = video_item['time_published']
else:
time_noticed = time_videos_retrieved
- rows.append((
- video_item['channel_id'],
- video_item['id'],
- video_item['title'],
- video_item['duration'],
- video_item['time_published'],
- video_item['is_time_published_exact'],
- time_noticed,
- video_item['description'],
- ))
+
+ # videos which need durations updated
+ non_durations = ('upcoming', 'none', 'live', '')
+ v_id = video_item['id']
+ if (existing_durs.get(v_id) is not None
+ and existing_durs[v_id].lower() in non_durations
+ and video_item['duration'] not in non_durations
+ ):
+ update_rows.append((
+ video_item['title'],
+ video_item['duration'],
+ video_item['time_published'],
+ video_item['is_time_published_exact'],
+ video_item['description'],
+ video_item['id'],
+ ))
+ # all other videos
+ else:
+ rows.append((
+ video_item['channel_id'],
+ video_item['id'],
+ video_item['title'],
+ video_item['duration'],
+ video_item['time_published'],
+ video_item['is_time_published_exact'],
+ time_noticed,
+ video_item['description'],
+ ))
cursor.executemany('''INSERT OR IGNORE INTO videos (
sql_channel_id,
@@ -677,6 +698,13 @@ def _get_upstream_videos(channel_id):
description
)
VALUES ((SELECT id FROM subscribed_channels WHERE yt_channel_id=?), ?, ?, ?, ?, ?, ?, ?)''', rows)
+ cursor.executemany('''UPDATE videos SET
+ title=?,
+ duration=?,
+ time_published=?,
+ is_time_published_exact=?,
+ description=?
+ WHERE video_id=?''', update_rows)
cursor.execute('''UPDATE subscribed_channels
SET time_last_checked = ?, next_check_time = ?
WHERE yt_channel_id=?''', [int(time.time()), next_check_time, channel_id])
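The point of the new update pass: INSERT OR IGNORE never touches a row whose video_id already exists (assuming video_id carries a UNIQUE constraint, which the OR IGNORE clause suggests), so a video first stored while it was a livestream or premiere (duration 'upcoming', 'live', 'none' or '') would keep that placeholder forever. A rough sketch of the two-pass pattern against a deliberately simplified, hypothetical table; the real videos table has more columns:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE videos (video_id TEXT PRIMARY KEY, duration TEXT)')
conn.execute("INSERT INTO videos VALUES ('abc123', 'upcoming')")  # premiere seen on an earlier check

# INSERT OR IGNORE leaves the stale placeholder untouched...
conn.execute("INSERT OR IGNORE INTO videos VALUES ('abc123', '10:31')")
print(conn.execute("SELECT duration FROM videos WHERE video_id='abc123'").fetchone())  # ('upcoming',)

# ...so the real duration only lands through the separate UPDATE pass.
conn.executemany('UPDATE videos SET duration=? WHERE video_id=?', [('10:31', 'abc123')])
print(conn.execute("SELECT duration FROM videos WHERE video_id='abc123'").fetchone())  # ('10:31',)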
@@ -809,7 +837,7 @@ def import_subscriptions():
error = 'Unsupported file format: ' + mime_type
error += (' . Only subscription.json, subscriptions.csv files'
' (from Google Takeouts)'
- ' and XML OPML files exported from Youtube\'s'
+ ' and XML OPML files exported from YouTube\'s'
' subscription manager page are supported')
return (flask.render_template('error.html', error_message=error),
400)
@@ -1004,7 +1032,8 @@ def get_subscriptions_page():
'muted': muted,
})
- return flask.render_template('subscriptions.html',
+ return flask.render_template(
+ 'subscriptions.html',
header_playlist_names=local_playlist.get_playlist_names(),
videos=videos,
num_pages=math.ceil(number_of_videos_in_db/60),