Fix the token issues in the migration
[larjonas-mediagoblin.git] / mediagoblin / db / migrations.py
blob 74c1194fe078b4aa875596083d91dd03c22617dd
1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 import datetime
18 import uuid
20 import six
22 if six.PY2:
23 import migrate
25 import pytz
26 import dateutil.tz
27 from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
28 Integer, Unicode, UnicodeText, DateTime,
29 ForeignKey, Date, Index)
30 from sqlalchemy.exc import ProgrammingError
31 from sqlalchemy.ext.declarative import declarative_base
32 from sqlalchemy.sql import and_
33 from sqlalchemy.schema import UniqueConstraint
35 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
36 from mediagoblin.db.migration_tools import (
37 RegisterMigration, inspect_table, replace_table_hack)
38 from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
39 Privilege, Generator)
40 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
43 MIGRATIONS = {}
@RegisterMigration(1, MIGRATIONS)
def ogg_to_webm_audio(db_conn):
    """Rename the 'ogg' file keyname to 'webm_audio'."""
    metadata = MetaData(bind=db_conn.bind)

    file_keynames = Table('core__file_keynames', metadata, autoload=True,
                          autoload_with=db_conn.bind)

    # NOTE: the execute(...) call was missing its closing paren in the
    # damaged source; restored here.
    db_conn.execute(
        file_keynames.update().where(file_keynames.c.name == 'ogg').
            values(name='webm_audio'))

    db_conn.commit()
60 @RegisterMigration(2, MIGRATIONS)
61 def add_wants_notification_column(db_conn):
62 metadata = MetaData(bind=db_conn.bind)
64 users = Table('core__users', metadata, autoload=True,
65 autoload_with=db_conn.bind)
67 col = Column('wants_comment_notification', Boolean,
68 default=True, nullable=True)
69 col.create(users, populate_defaults=True)
70 db_conn.commit()
73 @RegisterMigration(3, MIGRATIONS)
74 def add_transcoding_progress(db_conn):
75 metadata = MetaData(bind=db_conn.bind)
77 media_entry = inspect_table(metadata, 'core__media_entries')
79 col = Column('transcoding_progress', SmallInteger)
80 col.create(media_entry)
81 db_conn.commit()
class Collection_v0(declarative_base()):
    """Snapshot of the Collection model as of migration 4."""
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    # Owning user; this era of the schema calls the column 'creator'.
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    # Cached item count for the collection.
    items = Column(Integer, default=0)
class CollectionItem_v0(declarative_base()):
    """Snapshot of the CollectionItem model as of migration 4."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    ## This should be activated, normally.
    ## But this would change the way the next migration used to work.
    ## So it's commented for now.
    __table_args__ = (
        UniqueConstraint('collection', 'media_entry'),
        )

# Flag set by migration 4 so migration 7 (fix_CollectionItem_v0_constraint)
# knows the unique constraint was already created with the table.
collectionitem_unique_constraint_done = False
@RegisterMigration(4, MIGRATIONS)
def add_collection_tables(db_conn):
    """Create the collection and collection-item tables."""
    Collection_v0.__table__.create(db_conn.bind)
    CollectionItem_v0.__table__.create(db_conn.bind)

    # Record that the unique constraint arrived with the fresh table,
    # so migration 7 can skip re-adding it.
    global collectionitem_unique_constraint_done
    collectionitem_unique_constraint_done = True

    db_conn.commit()
127 @RegisterMigration(5, MIGRATIONS)
128 def add_mediaentry_collected(db_conn):
129 metadata = MetaData(bind=db_conn.bind)
131 media_entry = inspect_table(metadata, 'core__media_entries')
133 col = Column('collected', Integer, default=0)
134 col.create(media_entry)
135 db_conn.commit()
class ProcessingMetaData_v0(declarative_base()):
    """Snapshot of the ProcessingMetaData model as of migration 6."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    # URL to notify when processing finishes — assumption from the name;
    # confirm against the processing code.
    callback_url = Column(Unicode)
@RegisterMigration(6, MIGRATIONS)
def create_processing_metadata_table(db):
    """Create the processing-metadata table."""
    ProcessingMetaData_v0.__table__.create(db.bind)
    db.commit()
152 # Okay, problem being:
153 # Migration #4 forgot to add the uniqueconstraint for the
154 # new tables. While creating the tables from scratch had
155 # the constraint enabled.
157 # So we have four situations that should end up at the same
158 # db layout:
160 # 1. Fresh install.
161 # Well, easy. Just uses the tables in models.py
162 # 2. Fresh install using a git version just before this migration
163 # The tables are all there, the unique constraint is also there.
164 # This migration should do nothing.
165 # But as we can't detect the uniqueconstraint easily,
166 # this migration just adds the constraint again.
167 # And possibly fails very loud. But ignores the failure.
168 # 3. old install, not using git, just releases.
169 # This one will get the new tables in #4 (now with constraint!)
170 # And this migration is just skipped silently.
171 # 4. old install, always on latest git.
172 # This one has the tables, but lacks the constraint.
173 # So this migration adds the constraint.
@RegisterMigration(7, MIGRATIONS)
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset it. Maybe the whole thing gets run again
        # For a different db?
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)

    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint('collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    try:
        constraint.create()
    except ProgrammingError:
        # User probably has an install that was run since the
        # collection tables were added, so we don't need to run this migration.
        pass

    db_conn.commit()
203 @RegisterMigration(8, MIGRATIONS)
204 def add_license_preference(db):
205 metadata = MetaData(bind=db.bind)
207 user_table = inspect_table(metadata, 'core__users')
209 col = Column('license_preference', Unicode)
210 col.create(user_table)
211 db.commit()
214 @RegisterMigration(9, MIGRATIONS)
215 def mediaentry_new_slug_era(db):
217 Update for the new era for media type slugs.
219 Entries without slugs now display differently in the url like:
220 /u/cwebber/m/id=251/
222 ... because of this, we should back-convert:
223 - entries without slugs should be converted to use the id, if possible, to
224 make old urls still work
225 - slugs with = (or also : which is now also not allowed) to have those
226 stripped out (small possibility of breakage here sadly)
229 def slug_and_user_combo_exists(slug, uploader):
230 return db.execute(
231 media_table.select(
232 and_(media_table.c.uploader==uploader,
233 media_table.c.slug==slug))).first() is not None
235 def append_garbage_till_unique(row, new_slug):
237 Attach junk to this row until it's unique, then save it
239 if slug_and_user_combo_exists(new_slug, row.uploader):
240 # okay, still no success;
241 # let's whack junk on there till it's unique.
242 new_slug += '-' + uuid.uuid4().hex[:4]
243 # keep going if necessary!
244 while slug_and_user_combo_exists(new_slug, row.uploader):
245 new_slug += uuid.uuid4().hex[:4]
247 db.execute(
248 media_table.update(). \
249 where(media_table.c.id==row.id). \
250 values(slug=new_slug))
252 metadata = MetaData(bind=db.bind)
254 media_table = inspect_table(metadata, 'core__media_entries')
256 for row in db.execute(media_table.select()):
257 # no slug, try setting to an id
258 if not row.slug:
259 append_garbage_till_unique(row, six.text_type(row.id))
260 # has "=" or ":" in it... we're getting rid of those
261 elif u"=" in row.slug or u":" in row.slug:
262 append_garbage_till_unique(
263 row, row.slug.replace(u"=", u"-").replace(u":", u"-"))
265 db.commit()
@RegisterMigration(10, MIGRATIONS)
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    # slugs already seen, keyed by creator: {creator_id: [slug, ...]}
    existing_slugs = {}
    slugs_to_change = []

    for row in db.execute(collection_table.select()):
        # if duplicate slug, generate a unique slug
        if row.creator in existing_slugs and row.slug in \
           existing_slugs[row.creator]:
            slugs_to_change.append(row.id)
        else:
            if not row.creator in existing_slugs:
                existing_slugs[row.creator] = [row.slug]
            else:
                existing_slugs[row.creator].append(row.slug)

    for row_id in slugs_to_change:
        # a random uuid is effectively guaranteed not to collide
        new_slug = six.text_type(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))
    # sqlite does not like to change the schema when a transaction(update) is
    # not yet completed
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
                                  name='core__collection_creator_slug_key',
                                  table=collection_table)
    constraint.create()

    db.commit()
@RegisterMigration(11, MIGRATIONS)
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    verification_key = user_table.columns['verification_key']
    fp_verification_key = user_table.columns['fp_verification_key']
    fp_token_expire = user_table.columns['fp_token_expire']

    verification_key.drop()
    fp_verification_key.drop()
    fp_token_expire.drop()

    db.commit()
class CommentSubscription_v0(declarative_base()):
    """Per-user subscription to comments on a media entry (migration 12)."""
    __tablename__ = 'core__comment_subscriptions'
    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)

    # notify: show in-site notifications; send_email: also email the user —
    # assumption from the names; confirm against the notification code.
    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)
class Notification_v0(declarative_base()):
    """Base notification model for migration 12; subclasses join on id."""
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    # Marker distinguishing the notification subclass — presumably
    # 'comment' vs 'processing'; confirm against models.py.
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
class CommentNotification_v0(Notification_v0):
    """Notification whose subject is a MediaComment (joined-table subclass)."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
class ProcessingNotification_v0(Notification_v0):
    """Notification whose subject is a MediaEntry (joined-table subclass)."""
    __tablename__ = 'core__processing_notifications'

    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
@RegisterMigration(12, MIGRATIONS)
def add_new_notification_tables(db):
    """Create the comment-subscription and notification tables."""
    metadata = MetaData(bind=db.bind)

    # NOTE(review): these reflected tables are never used below —
    # presumably vestigial; confirm before removing.
    user_table = inspect_table(metadata, 'core__users')
    mediaentry_table = inspect_table(metadata, 'core__media_entries')
    mediacomment_table = inspect_table(metadata, 'core__media_comments')

    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)

    db.commit()
@RegisterMigration(13, MIGRATIONS)
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    # .alter() presumably comes from sqlalchemy-migrate (see the
    # `import migrate` at the top of the file) — confirm.
    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        constraint = UniqueConstraint('username', table=user_table)
        constraint.create()

    db.commit()
# oauth1 migrations
class Client_v0(declarative_base()):
    """
    Model representing a client - Used for API Auth
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    # NOTE: 'expirey' is a historical typo kept because it is the real
    # column name in existing databases.
    expirey = Column(DateTime, nullable=True)
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # optional stuff
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    def __repr__(self):
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
class RequestToken_v0(declarative_base()):
    """
    Model for representing the request tokens
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client_v0.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    # "oob" = out-of-band callback, the OAuth1 convention
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class AccessToken_v0(declarative_base()):
    """
    Model for representing the access tokens
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class NonceTimestamp_v0(declarative_base()):
    """
    A place the timestamp and nonce can be stored - this is for OAuth1
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
@RegisterMigration(14, MIGRATIONS)
def create_oauth1_tables(db):
    """ Creates the OAuth1 tables """

    Client_v0.__table__.create(db.bind)
    RequestToken_v0.__table__.create(db.bind)
    AccessToken_v0.__table__.create(db.bind)
    NonceTimestamp_v0.__table__.create(db.bind)

    db.commit()
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    users = inspect_table(metadata, "core__users")

    wants_col = Column('wants_notifications', Boolean, default=True)
    wants_col.create(users)
    db.commit()
@RegisterMigration(16, MIGRATIONS)
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    # Per-user running total and cap.
    Column('uploaded', Integer, default=0).create(user_table)
    Column('upload_limit', Integer).create(user_table)

    # Size of each individual media file.
    Column('file_size', Integer, default=0).create(media_entry_table)

    db.commit()
@RegisterMigration(17, MIGRATIONS)
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_file_table = inspect_table(metadata, "core__mediafiles")

    metadata_col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    metadata_col.create(media_file_table)

    db.commit()
###################
# Moderation tables
###################

class ReportBase_v0(declarative_base()):
    """Base report model; joined-table inheritance keyed on 'type'."""
    __tablename__ = 'core__reports'
    id = Column(Integer, primary_key=True)
    reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
    report_content = Column(UnicodeText)
    reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # SQL column is named 'type'; the mapped attribute is 'discriminator'.
    discriminator = Column('type', Unicode(50))
    resolver_id = Column(Integer, ForeignKey(User.id))
    resolved = Column(DateTime)
    result = Column(UnicodeText)
    __mapper_args__ = {'polymorphic_on': discriminator}
class CommentReport_v0(ReportBase_v0):
    """Report targeting a MediaComment (subclass of ReportBase_v0)."""
    __tablename__ = 'core__reports_on_comments'
    __mapper_args__ = {'polymorphic_identity': 'comment_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
class MediaReport_v0(ReportBase_v0):
    """Report targeting a MediaEntry (subclass of ReportBase_v0)."""
    __tablename__ = 'core__reports_on_media'
    __mapper_args__ = {'polymorphic_identity': 'media_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'), primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
class UserBan_v0(declarative_base()):
    """Ban record for a user; one row per banned user."""
    __tablename__ = 'core__user_bans'
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     primary_key=True)
    # NULL expiration presumably means a permanent ban — confirm.
    expiration_date = Column(Date)
    reason = Column(UnicodeText, nullable=False)
class Privilege_v0(declarative_base()):
    """A named privilege (admin, moderator, uploader, ...)."""
    __tablename__ = 'core__privileges'
    id = Column(Integer, nullable=False, primary_key=True, unique=True)
    privilege_name = Column(Unicode, nullable=False, unique=True)
class PrivilegeUserAssociation_v0(declarative_base()):
    """Privilege<->User association table as created by migration 18.

    NOTE: the ForeignKey targets are crossed on purpose-by-accident:
    'core__privilege_id' points at User.id and 'core__user_id' at
    Privilege.id.  This is the historical bug that migration 21
    (fix_privilege_user_association_table) repairs — do not "fix" it here.
    """
    __tablename__ = 'core__privileges_users'
    privilege_id = Column(
        'core__privilege_id',
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    user_id = Column(
        'core__user_id',
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
# Default privileges seeded by migration 18, mirroring what the
# FOUNDATIONS system creates on a fresh install.
PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
                            {'privilege_name':u'moderator'},
                            {'privilege_name':u'uploader'},
                            {'privilege_name':u'reporter'},
                            {'privilege_name':u'commenter'},
                            {'privilege_name':u'active'}]
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.

class User_vR1(declarative_base()):
    """Replacement users table used by the sqlite copy-and-rename hack."""
    __tablename__ = 'rename__users'
    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    url = Column(Unicode)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
@RegisterMigration(18, MIGRATIONS)
def create_moderation_tables(db):
    """Create the moderation tables and convert is_admin/status into
    the new privilege system."""

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table= inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status==u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status!=u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name==privilege_name)).first()['id']
        for privilege_name in
            [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
    ]

    # Give each user the appopriate privileges depending whether they are an
    # admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # NOTE: the keyword/value pairs below are deliberately crossed because
    # the association table's columns were created swapped (see
    # PrivilegeUserAssociation_v0); migration 21 straightens this out.
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                             reporter_privilege_id, commenter_privilege_id,
                             active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~

        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)

    media_collected= inspect_table(metadata, 'core__media_entries')
    media_collected = media_collected.columns['collected']

    media_collected.drop()

    db.commit()
@RegisterMigration(20, MIGRATIONS)
def add_metadata_column(db):
    """Add the MediaEntry.media_metadata JSON column."""
    metadata = MetaData(bind=db.bind)
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    metadata_col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
                          default=MutationDict())
    metadata_col.create(media_entry_table)

    db.commit()
class PrivilegeUserAssociation_R1(declarative_base()):
    """Corrected Privilege<->User association table (migration 21):
    'user' really references User.id and 'privilege' Privilege.id."""
    __tablename__ = 'rename__privileges_users'
    user = Column(
        "user",
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    privilege = Column(
        "privilege",
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
@RegisterMigration(21, MIGRATIONS)
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        # Renames are crossed on purpose: core__user_id actually held
        # privilege ids and vice versa.
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
@RegisterMigration(22, MIGRATIONS)
def add_index_username_field(db):
    """
    This migration has been found to be doing the wrong thing.  See
    the documentation in migration 23 (revert_username_index) below
    which undoes this for those databases that did run this migration.

    Old description:
      This indexes the User.username field which is frequently queried
      for example a user logging in. This solves the issue #894
    """
    ## This code is left commented out *on purpose!*
    ##
    ## We do not normally allow commented out code like this in
    ## MediaGoblin but this is a special case: since this migration has
    ## been nullified but with great work to set things back below,
    ## this is commented out for historical clarity.
    # metadata = MetaData(bind=db.bind)
    # user_table = inspect_table(metadata, "core__users")
    #
    # new_index = Index("ix_core__users_uploader", user_table.c.username)
    # new_index.create()
    #
    # db.commit()
    pass
@RegisterMigration(23, MIGRATIONS)
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (ie, either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)

    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

        db.commit()
class Generator_R0(declarative_base()):
    """Activity generator (the application that produced an activity)."""
    __tablename__ = "core__generators"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    object_type = Column(Unicode, nullable=False)
class ActivityIntermediator_R0(declarative_base()):
    """Indirection row letting an Activity point at different object kinds."""
    __tablename__ = "core__activity_intermediators"
    id = Column(Integer, primary_key=True)
    # Kind of object this intermediator stands for — presumably media,
    # comment, user or collection; confirm against models.py.
    type = Column(Unicode, nullable=False)
class Activity_R0(declarative_base()):
    """Activity model snapshot for migration 24."""
    __tablename__ = "core__activities"
    id = Column(Integer, primary_key=True)
    actor = Column(Integer, ForeignKey(User.id), nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    verb = Column(Unicode, nullable=False)
    content = Column(Unicode, nullable=True)
    title = Column(Unicode, nullable=True)
    generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
    # Object is required; target is optional.
    object = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=False)
    target = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=True)
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
    """
    Creates everything to create activities in GMG
    - Adds Activity, ActivityIntermediator and Generator table
    - Creates GMG service generator for activities produced by the server
    - Adds the activity_as_object and activity_as_target to objects/targets
    - Retroactively adds activities for what we can acurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Initiate the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")

    # Create the foundations for Generator: one "service" generator that
    # represents the server itself.
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()

    # Now we want to modify the tables which MAY have an activity at some point
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()

    # Now we want to retroactively add what activities we can
    # first we'll add activities when people uploaded media.
    # these can't have content as it's not fesible to get the
    # correct content strings.
    for media in db.execute(media_entry_table.select()):
        # Now we want to create the intermedaitory
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add the activity
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
            )).first()

        # Create an AI for target
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        # NOTE(review): this filters on media_entry.id, not comment.id —
        # it looks like a bug, but the behavior of a shipped migration
        # must not change, and the loop's final update below (keyed on
        # comment.id) sets the value this comment actually keeps.
        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
            ).where(media_comments_table.c.id==media_entry.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment"
        )).inserted_primary_key[0]

        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # create AI
        db_ai = db.execute(ai_table.insert().values(
            type="collection"
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now add link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }
        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()
class Location_V0(declarative_base()):
    """Frozen snapshot of the Location model created by migration 25."""
    __tablename__ = "core__locations"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    # JSON-encoded dicts; MutationDict lets SQLAlchemy detect in-place edits.
    position = Column(MutationDict.as_mutable(JSONEncoded))
    address = Column(MutationDict.as_mutable(JSONEncoded))
@RegisterMigration(25, MIGRATIONS)
def add_location_model(db):
    """ Add location model """
    metadata = MetaData(bind=db.bind)

    # Create the new location table itself, first.
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Every model that can carry a location gets a nullable "location"
    # foreign key into the new table.  A fresh Column object is built
    # per table because sqlalchemy-migrate binds the column on create().
    location_bearing = (
        "core__users",
        "core__collections",
        "core__media_entries",
        "core__media_comments",
    )
    for table_name in location_bearing:
        target_table = inspect_table(metadata, table_name)
        location_fk = Column("location", Integer, ForeignKey(Location_V0.id))
        location_fk.create(target_table)

    db.commit()
@RegisterMigration(26, MIGRATIONS)
def datetime_to_utc(db):
    """ Convert datetime stamps to UTC """
    # The database stored naive timestamps in the server's local zone.
    server_timezone = dateutil.tz.tzlocal()

    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Attach the server zone, then express the moment in UTC.
        return dt.replace(tzinfo=server_timezone).astimezone(pytz.UTC)

    # (table name, datetime columns to convert, key column for the WHERE).
    # The token tables have no integer id, so rows are keyed by "token".
    conversions = (
        ("core__users", ("created",), "id"),
        ("core__clients", ("created", "updated"), "id"),
        ("core__request_tokens", ("created", "updated"), "token"),
        ("core__access_tokens", ("created", "updated"), "token"),
        ("core__media_entries", ("created",), "id"),
        ("core__attachment_files", ("created",), "id"),
        ("core__media_comments", ("created",), "id"),
        ("core__collections", ("created",), "id"),
        ("core__collection_items", ("added",), "id"),
        ("core__comment_subscriptions", ("created",), "id"),
        ("core__notifications", ("created",), "id"),
        ("core__reports", ("created",), "id"),
        ("core__generators", ("published", "updated"), "id"),
        ("core__activities", ("published", "updated"), "id"),
    )

    # Walk every row of every table and rewrite its timestamps in UTC.
    for table_name, dt_columns, key_column in conversions:
        table = inspect_table(metadata, table_name)
        for row in db.execute(table.select()):
            new_values = dict(
                (col, dt_to_utc(getattr(row, col))) for col in dt_columns)
            db.execute(table.update().values(**new_values).where(
                table.c[key_column] == getattr(row, key_column)))

    # Commit this to the database
    db.commit()