# mygpo/maintenance/migrate.py

from __future__ import unicode_literals

import json
import logging
import time
from datetime import datetime

from django.contrib.contenttypes.models import ContentType
from django.db import transaction, IntegrityError
from django.utils.text import slugify

from mygpo.core.models import Podcast as P, Episode as E
from mygpo.podcasts.models import Podcast, Episode, URL, Slug, Tag, MergedUUID

logger = logging.getLogger(__name__)

#p = P.wrap(json.load(open('tmp.txt')))
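
# One-off maintenance script: replays the CouchDB changes feed and copies
# Podcast and Episode documents into the new Django ORM models
# (mygpo.podcasts), creating or updating the rows together with their
# URLs, slugs, tags and merged UUIDs.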


def migrate_episode(e):
    """ Copy a CouchDB episode document into the Episode ORM model """

    # make sure the episode's podcast exists, creating an empty stub if needed
    podcast, created = Podcast.objects.get_or_create(id=e.podcast, defaults={
        'created': datetime.utcnow(),
    })

    if created:
        logger.info('Created stub for podcast %s', e.podcast)

    e2, created = Episode.objects.update_or_create(id=e._id, defaults={
        'title': e.title or '',
        'guid': e.guid,
        'description': e.description or '',
        'subtitle': e.subtitle or '',
        'content': e.content or '',
        'link': e.link,
        'released': e.released,
        'author': e.author,
        'duration': max(0, e.duration) if e.duration is not None else None,
        'filesize': max(0, e.filesize) if e.filesize is not None else None,
        'language': e.language[:10] if e.language is not None else None,
        'last_update': e.last_update,
        'outdated': e.outdated,
        'mimetypes': ','.join(e.mimetypes),
        'listeners': max(0, e.listeners) if e.listeners is not None else None,
        'content_types': ','.join(e.content_types),
        'flattr_url': e.flattr_url,
        'created': datetime.fromtimestamp(e.created_timestamp) if e.created_timestamp else datetime.utcnow(),
        'license': e.license,
        'podcast': podcast,
    })

    update_urls(e, e2, None)
    update_slugs(e, e2, None)
    update_ids(e, e2)


def migrate_podcast(p):
    """ Copy a CouchDB podcast document into the Podcast ORM model """
    logger.info('Migrating podcast %r', p)

    time.sleep(5)  # pause, presumably to throttle the migration

    p2, created = Podcast.objects.update_or_create(id=p._id, defaults={
        'title': p.title or '',
        'subtitle': p.subtitle or '',
        'description': p.description or '',
        'link': p.link,
        'language': p.language,
        'created': datetime.fromtimestamp(p.created_timestamp) if p.created_timestamp else datetime.utcnow(),
        'last_update': p.last_update,
        'license': p.license,
        'flattr_url': p.flattr_url,
        'outdated': p.outdated,
        'author': p.author,
        'logo_url': p.logo_url,
        'common_episode_title': p.common_episode_title or '',
        'new_location': p.new_location,
        'latest_episode_timestamp': p.latest_episode_timestamp,
        'episode_count': p.episode_count or 0,
        'hub': p.hub,
        'content_types': ','.join(p.content_types),
        'restrictions': ','.join(p.restrictions),
        'twitter': getattr(p, 'twitter', None),
    })

    update_urls(p, p2, None)
    update_slugs(p, p2, None)
    update_tags(p, p2)
    update_ids(p, p2)

    time.sleep(10)
    return p2


@transaction.atomic
def update_urls(old, new, scope):
    """ Copy the document's URL list, keeping its ordering """

    existing_urls = {u.url: u for u in new.urls.all()}

    for n, url in enumerate(old.urls):
        try:
            # the URL already exists; only its position is updated
            u = existing_urls.pop(url)
            u.order = n
            u.save()
        except KeyError:
            try:
                URL.objects.create(url=url, content_object=new, order=n,
                                   scope=scope)
            except IntegrityError as ie:
                logger.warn('Could not create URL for %s: %s', new, ie)

    # URLs that are no longer referenced by the document are removed
    delete = [u.pk for u in existing_urls.values()]
    logger.info('Deleting %d URLs', len(delete))
    URL.objects.filter(id__in=delete).delete()


@transaction.atomic
def update_slugs(old, new, scope):
    """ Copy the document's slugs (and its old IDs, slugified) """

    existing_slugs = {s.slug: s for s in new.slugs.all()}
    logger.info('%d existing slugs', len(existing_slugs))

    # Python 2: filter/map return lists here, so len() below works
    new_slugs = filter(None, [old.slug] + old.merged_slugs +
                             [old.oldid] + old.merged_oldids)
    new_slugs = map(unicode, new_slugs)
    new_slugs = map(slugify, new_slugs)
    logger.info('%d new slugs', len(new_slugs))

    max_length = Slug._meta.get_field('slug').max_length

    # move existing slugs out of the way of the new numbering, so that
    # assigning orders 0..n below cannot collide with an existing order
    max_order = max([s.order for s in existing_slugs.values()] + [len(new_slugs)])
    logger.info('Renumbering slugs starting from %d', max_order)
    for n, slug in enumerate(existing_slugs.values(), max_order+1):
        slug.order = n
        slug.save()

    logger.info('%d existing slugs', len(existing_slugs))

    for n, slug in enumerate(new_slugs):
        try:
            # the slug already exists; only its position is updated
            s = existing_slugs.pop(slug)
            logger.info('Updating slug %d: %s', n, slug)
            s.order = n
            s.save()
        except KeyError:
            logger.info('Creating new slug %d: %s', n, slug)
            try:
                Slug.objects.create(slug=slug[:max_length], content_object=new,
                                    order=n, scope=scope)
            except IntegrityError as ie:
                logger.warn('Could not create Slug for %s: %s', new, ie)

    # slugs that are no longer referenced by the document are removed
    delete = [s.pk for s in existing_slugs.values()]
    Slug.objects.filter(id__in=delete).delete()


@transaction.atomic
def update_tags(old, new):
    """ Copy the document's feed-supplied tags """
    # TODO: delete?
    for tag in old.tags.get('feed', []):
        t, created = Tag.objects.get_or_create(
            tag=tag,
            source=Tag.FEED,
            content_type=ContentType.objects.get_for_model(new),
            object_id=new.pk,
        )


@transaction.atomic
def update_ids(old, new):
    """ Copy the UUIDs of documents that were merged into this one """
    # TODO: delete?
    for mid in old.merged_ids:
        u, created = MergedUUID.objects.get_or_create(
            uuid=mid,
            content_type=ContentType.objects.get_for_model(new),
            object_id=new.pk,
        )
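

# source database: a local copy of the CouchDB core database; host, port
# and database name are specific to this deployment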
from couchdbkit import Database
from couchdbkit.changes import ChangesStream, fold, foreach

db = Database('http://127.0.0.1:6984/mygpo_core_copy')
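

# MIGRATIONS maps a CouchDB doc_type to a (couchdbkit document class,
# migration function) pair; (None, None) marks document types that are skipped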
MIGRATIONS = {
    'Podcast': (P, migrate_podcast),
    'Episode': (E, migrate_episode),
    'PodcastSubscriberData': (None, None),
    'EmailMessage': (None, None),
    'ExamplePodcasts': (None, None),
}


def migrate_change(c):
    """ Migrate a single change from the CouchDB changes feed """
    logger.info('Migrate seq %s', c['seq'])
    doctype = c['doc']['doc_type']

    cls, migrate = MIGRATIONS[doctype]

    if cls is None:
        return  # this document type is not migrated

    obj = cls.wrap(c['doc'])
    migrate(obj)


def migrate(since=592054):
    """ Follow the changes feed from `since` and migrate each document """
    with ChangesStream(db,
                       feed="continuous",
                       heartbeat=True,
                       include_docs=True,
                       since=since,
                       ) as stream:
        for change in stream:
            migrate_change(change)
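

# Intended to be run interactively; a minimal sketch of an invocation from a
# Django shell (the sequence number to resume from depends on the deployment):
#
#   from mygpo.maintenance.migrate import migrate
#   migrate(since=592054)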