[Migration] Get Podcasts / Episodes from PostgreSQL
mygpo/db/couchdb/episode.py

from hashlib import sha1
from datetime import datetime
from collections import Counter

from couchdbkit import MultipleResultsFound

from django.core.cache import cache

from mygpo.podcasts.models import Podcast
from mygpo.core.models import Episode, MergedIdException
from mygpo.core.signals import incomplete_obj
from mygpo.cache import cache_result
from mygpo.decorators import repeat_on_conflict
from mygpo.utils import get_timestamp
from mygpo.db import QueryParameterMissing
from mygpo.db.couchdb.utils import is_couchdb_id
from mygpo.db.couchdb import get_main_database, get_userdata_database, \
    get_single_result

import logging
logger = logging.getLogger(__name__)


@cache_result(timeout=60*60)
def episode_by_id(episode_id, current_id=False):
    """ returns the Episode for the given CouchDB id

    If current_id is True, a MergedIdException is raised when the given id
    has been merged into another document. """

    if not episode_id:
        raise QueryParameterMissing('episode_id')

    db = get_main_database()

    episode = get_single_result(db, 'episodes/by_id',
            key = episode_id,
            include_docs = True,
            schema = Episode,
        )

    if not episode:
        return None

    if current_id and episode._id != episode_id:
        raise MergedIdException(episode, episode._id)

    if episode.needs_update:
        incomplete_obj.send_robust(sender=episode)

    return episode
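

# Illustrative usage sketch (not part of the original module): looking up an
# episode by a CouchDB id that might have been merged into another document.
# `some_episode_id` is a placeholder.
def _example_lookup_episode(some_episode_id):
    try:
        return episode_by_id(some_episode_id, current_id=True)
    except MergedIdException:
        # the id belongs to a merged document; fetch the current one instead
        return episode_by_id(some_episode_id, current_id=False)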


@cache_result(timeout=60*60)
def episodes_by_id(episode_ids):
    """ returns the Episodes for the given list of CouchDB ids """

    if episode_ids is None:
        raise QueryParameterMissing('episode_ids')

    if not episode_ids:
        return []

    r = Episode.view('episodes/by_id',
            include_docs = True,
            keys = episode_ids,
        )

    episodes = list(r)

    for episode in episodes:
        if episode.needs_update:
            incomplete_obj.send_robust(sender=episode)

    return episodes


def episode_for_podcast_url(podcast_url, episode_url, create=False):
    """ returns the Episode for the given podcast / episode URL pair

    If create is True, missing podcast and episode objects are created. """

    if not podcast_url:
        raise QueryParameterMissing('podcast_url')

    if not episode_url:
        raise QueryParameterMissing('episode_url')

    if create:
        podcast = Podcast.objects.get_or_create_for_url(podcast_url)

    else:
        try:
            podcast = Podcast.objects.get(urls__url=podcast_url)
        except Podcast.DoesNotExist:
            # podcast does not exist and should not be created
            return None

    return episode_for_podcast_id_url(podcast.id, episode_url, create)
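

# Usage sketch (illustrative only): resolving, and if necessary creating, an
# episode from a podcast feed URL and an episode media URL.  The URLs below
# are placeholders.
def _example_get_or_create_episode():
    return episode_for_podcast_url(
        'http://example.com/feed.xml',
        'http://example.com/episode-1.mp3',
        create=True,
    )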


def episode_for_podcast_id_url(podcast_id, episode_url, create=False):
    """ returns the Episode for the given podcast id / episode URL pair

    If create is True, a missing episode is created for the podcast. """

    if not podcast_id:
        raise QueryParameterMissing('podcast_id')

    if not episode_url:
        raise QueryParameterMissing('episode_url')

    key = u'episode-podcastid-%s-url-%s' % (
            sha1(podcast_id.encode('utf-8')).hexdigest(),
            sha1(episode_url.encode('utf-8')).hexdigest())

    # Disabled as cache invalidation is not working properly
    # episode = cache.get(key)
    # if episode:
    #     return episode

    db = get_main_database()
    episode = get_single_result(db, 'episodes/by_podcast_url',
            key = [podcast_id, episode_url],
            include_docs = True,
            reduce = False,
            schema = Episode,
        )

    if episode:
        if episode.needs_update:
            incomplete_obj.send_robust(sender=episode)
        else:
            cache.set(key, episode)
        return episode

    if create:
        episode = Episode()
        episode.created_timestamp = get_timestamp(datetime.utcnow())
        episode.podcast = podcast_id
        episode.urls = [episode_url]
        episode.save()
        incomplete_obj.send_robust(sender=episode)
        return episode

    return None


@cache_result(timeout=60*60)
def episode_count():
    """ returns the total number of episodes """
    db = get_main_database()
    r = get_single_result(db, 'episodes/by_podcast',
            reduce = True,
            stale = 'update_after',
        )

    return r['value'] if r else 0
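

# Sketch (illustrative only): episode_count() is cached for an hour and reads
# the view with stale='update_after', so it is relatively cheap to call.
def _example_log_episode_count():
    logger.info('%d episodes in the database', episode_count())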


def episodes_to_dict(ids, use_cache=False):
    """ returns a dict mapping each of the given ids to its Episode (or None) """

    if ids is None:
        raise QueryParameterMissing('ids')

    if not ids:
        return {}

    ids = list(set(ids))
    objs = dict()

    cache_objs = []
    if use_cache:
        res = cache.get_many(ids)
        cache_objs.extend(res.values())
        ids = [x for x in ids if x not in res.keys()]

    db_objs = list(episodes_by_id(ids))

    for obj in (cache_objs + db_objs):

        # get_multi returns dict {'key': _id, 'error': 'not found'}
        # for non-existing objects
        if isinstance(obj, dict) and 'error' in obj:
            _id = obj['key']
            objs[_id] = None
            continue

        for i in obj.get_ids():
            objs[i] = obj

    if use_cache:
        cache.set_many(dict( (obj._id, obj) for obj in db_objs))

    return objs
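

# Illustrative sketch (not part of the original module): using the mapping
# returned by episodes_to_dict() to find ids that could not be resolved.
# `episode_ids` is assumed to be a list of CouchDB episode ids.
def _example_missing_episode_ids(episode_ids):
    episodes = episodes_to_dict(episode_ids, use_cache=True)
    # unknown ids map to None; merged ids map to the same Episode object
    return [eid for eid, episode in episodes.items() if episode is None]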


def episodes_for_podcast_current(podcast, limit=None):
    """ returns the current episodes of the given podcast """

    if not podcast:
        raise QueryParameterMissing('podcast')

    res = Episode.view('episodes/by_podcast_current',
            startkey = podcast.get_id(),
            endkey = podcast.get_id(),
            include_docs = True,
            limit = limit,
        )

    episodes = list(res)

    for episode in episodes:
        if episode.needs_update:
            incomplete_obj.send_robust(sender=episode)

    return episodes


def episodes_for_podcast_uncached(podcast, since=None, until={}, **kwargs):
    """ returns the episodes of the podcast, optionally restricted to [since, until] """

    if not podcast:
        raise QueryParameterMissing('podcast')

    if kwargs.get('descending', False):
        since, until = until, since

    if isinstance(since, datetime):
        since = since.isoformat()

    if isinstance(until, datetime):
        until = until.isoformat()

    res = Episode.view('episodes/by_podcast',
            startkey = [podcast.get_id(), since],
            endkey = [podcast.get_id(), until],
            include_docs = True,
            reduce = False,
            **kwargs
        )

    episodes = list(res)

    for episode in episodes:
        if episode.needs_update:
            incomplete_obj.send_robust(sender=episode)

    return episodes


episodes_for_podcast = cache_result(timeout=60*60)(episodes_for_podcast_uncached)
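

# Illustrative sketch (not from the original file): a date-bounded query
# through the cached wrapper.  `podcast` is assumed to be a podcast document
# providing get_id(); `since` may be a datetime and is converted to an ISO
# string before being used as part of the view key.
def _example_recent_episodes(podcast, since):
    return episodes_for_podcast(podcast, since=since, descending=True, limit=10)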


@cache_result(timeout=60*60)
def episode_count_for_podcast(podcast, since=None, until={}, **kwargs):
    """ returns the number of episodes of the podcast, optionally within [since, until] """

    if not podcast:
        raise QueryParameterMissing('podcast')

    if kwargs.get('descending', False):
        since, until = until, since

    if isinstance(since, datetime):
        since = since.isoformat()

    if isinstance(until, datetime):
        until = until.isoformat()

    db = get_main_database()
    res = get_single_result(db, 'episodes/by_podcast',
            startkey = [podcast.get_id(), since],
            endkey = [podcast.get_id(), until],
            reduce = True,
            group_level = 1,
            **kwargs
        )

    return res['value'] if res else 0


def favorite_episode_ids_for_user(user):
    """ returns the CouchDB ids of the user's favorite episodes """

    if not user:
        raise QueryParameterMissing('user')

    udb = get_userdata_database()
    favorites = udb.view('favorites/episodes_by_user',
            key = user._id,
        )

    return set(x['value']['_id'] for x in favorites)


def favorite_episodes_for_user(user):
    episode_ids = list(favorite_episode_ids_for_user(user))
    return episodes_by_id(episode_ids)


def chapters_for_episode(episode_id):
    """ returns (user, chapter) tuples for the episode's chapters """

    if not episode_id:
        raise QueryParameterMissing('episode_id')

    udb = get_userdata_database()
    r = udb.view('chapters/by_episode',
            startkey = [episode_id, None],
            endkey = [episode_id, {}],
        )

    return map(_wrap_chapter, r)
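

# Illustrative sketch (not part of the original module): counting how many
# chapters each user contributed to an episode, based on the (user, chapter)
# pairs returned by chapters_for_episode().
def _example_chapter_counts_by_user(episode_id):
    return Counter(user for user, _chapter in chapters_for_episode(episode_id))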


def filetype_stats():
    """ Returns a filetype counter over all episodes """

    db = get_main_database()
    r = db.view('episode_stats/filetypes',
            stale = 'update_after',
            reduce = True,
            group_level = 1,
        )

    return Counter({x['key']: x['value'] for x in r})
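

# Sketch (illustrative only): the Counter returned by filetype_stats() can be
# queried directly for the most common file types.
def _example_top_filetypes(n=5):
    return filetype_stats().most_common(n)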


def _wrap_chapter(res):
    from mygpo.users.models import Chapter
    user = res['key'][1]
    chapter = Chapter.wrap(res['value'])
    udb = get_userdata_database()
    chapter.set_db(udb)
    return (user, chapter)


@repeat_on_conflict(['episode'])
def set_episode_slug(episode, slug):
    """ sets slug as new main slug of the episode, moves other to merged """
    episode.set_slug(slug)
    episode.save()


@repeat_on_conflict(['episode'])
def remove_episode_slug(episode, slug):
    """ removes slug from main and merged slugs """
    episode.remove_slug(slug)
    episode.save()


@repeat_on_conflict(['episode_state'])
def set_episode_favorite(episode_state, is_fav):
    udb = get_userdata_database()
    episode_state.set_favorite(is_fav)
    udb.save_doc(episode_state)


@repeat_on_conflict(['episode'])
def set_episode_listeners(episode, listeners):

    if episode.listeners == listeners:
        return False

    episode.listeners = listeners

    db = get_main_database()
    db.save_doc(episode)
    return True
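

# Illustrative call of the conflict-safe setter above (sketch only).  The
# episode is passed as a keyword argument, assuming that is how
# @repeat_on_conflict identifies the document to reload on a save conflict.
def _example_update_listeners(episode, listeners):
    if set_episode_listeners(episode=episode, listeners=listeners):
        logger.info('updated listener count of episode %s', episode._id)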