1cc4b6252133124c32681ab3261ba3759c96165e
[mediagoblin.git] / mediagoblin / db / migrations.py
1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import datetime
18 import uuid
19
20 import six
21
22 if six.PY2:
23 import migrate
24
25 import pytz
26 import dateutil.tz
27 from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
28 Integer, Unicode, UnicodeText, DateTime,
29 ForeignKey, Date, Index)
30 from sqlalchemy.exc import ProgrammingError
31 from sqlalchemy.ext.declarative import declarative_base
32 from sqlalchemy.sql import and_
33 from sqlalchemy.schema import UniqueConstraint
34
35 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
36 from mediagoblin.db.migration_tools import (
37 RegisterMigration, inspect_table, replace_table_hack)
38 from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
39 Privilege, Generator, GenericForeignKey)
40 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
41
42
# Registry mapping migration version number -> migration function.
# Populated by the @RegisterMigration(<version>, MIGRATIONS) decorators below.
MIGRATIONS = {}
44
45
@RegisterMigration(1, MIGRATIONS)
def ogg_to_webm_audio(db_conn):
    """Rename the 'ogg' media file keyname to 'webm_audio'."""
    metadata = MetaData(bind=db_conn.bind)

    keynames = Table('core__file_keynames', metadata, autoload=True,
                     autoload_with=db_conn.bind)

    rename_stmt = (keynames.update()
                   .where(keynames.c.name == 'ogg')
                   .values(name='webm_audio'))
    db_conn.execute(rename_stmt)
    db_conn.commit()
58
59
@RegisterMigration(2, MIGRATIONS)
def add_wants_notification_column(db_conn):
    """Add the User.wants_comment_notification boolean column."""
    metadata = MetaData(bind=db_conn.bind)

    user_table = Table('core__users', metadata, autoload=True,
                       autoload_with=db_conn.bind)

    # Existing rows get the default (True) backfilled.
    notification_col = Column('wants_comment_notification', Boolean,
                              default=True, nullable=True)
    notification_col.create(user_table, populate_defaults=True)
    db_conn.commit()
71
72
@RegisterMigration(3, MIGRATIONS)
def add_transcoding_progress(db_conn):
    """Add the MediaEntry.transcoding_progress column."""
    metadata = MetaData(bind=db_conn.bind)

    entries = inspect_table(metadata, 'core__media_entries')

    progress_col = Column('transcoding_progress', SmallInteger)
    progress_col.create(entries)
    db_conn.commit()
82
83
class Collection_v0(declarative_base()):
    """Snapshot of the Collection model as of migration 4.

    Frozen copy; must not be changed to track the live models.py.
    """
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    # Running count of items contained in this collection.
    items = Column(Integer, default=0)
95
class CollectionItem_v0(declarative_base()):
    """Snapshot of the CollectionItem model as of migration 4."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    # NOTE(review): the original comment here claimed this constraint was
    # "commented for now", but it is in fact active. Databases created by
    # an older version of migration 4 lack it, which is exactly what
    # migration 7 (fix_CollectionItem_v0_constraint) repairs.
    __table_args__ = (
        UniqueConstraint('collection', 'media_entry'),
        {})
113
# Handshake flag between migrations 4 and 7: migration 4 sets this when it
# creates the collection tables (which already carry the unique constraint),
# so migration 7 knows it can skip re-adding the constraint in the same run.
collectionitem_unique_constraint_done = False
115
@RegisterMigration(4, MIGRATIONS)
def add_collection_tables(db_conn):
    """Create the Collection and CollectionItem tables."""
    Collection_v0.__table__.create(db_conn.bind)
    CollectionItem_v0.__table__.create(db_conn.bind)

    # Signal migration 7 that the unique constraint already exists
    # (the tables were just created with it in place).
    global collectionitem_unique_constraint_done
    collectionitem_unique_constraint_done = True

    db_conn.commit()
125
126
@RegisterMigration(5, MIGRATIONS)
def add_mediaentry_collected(db_conn):
    """Add the MediaEntry.collected counter column."""
    metadata = MetaData(bind=db_conn.bind)

    entries = inspect_table(metadata, 'core__media_entries')

    collected_col = Column('collected', Integer, default=0)
    collected_col.create(entries)
    db_conn.commit()
136
137
class ProcessingMetaData_v0(declarative_base()):
    """Snapshot of the ProcessingMetaData model as of migration 6."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    # URL to notify when processing of the media entry completes.
    callback_url = Column(Unicode)
145
@RegisterMigration(6, MIGRATIONS)
def create_processing_metadata_table(db):
    """Create the ProcessingMetaData table."""
    ProcessingMetaData_v0.__table__.create(db.bind)
    db.commit()
150
151
# Okay, problem being:
# Migration #4 forgot to add the uniqueconstraint for the
# new tables. While creating the tables from scratch had
# the constraint enabled.
#
# So we have four situations that should end up at the same
# db layout:
#
# 1. Fresh install.
#    Well, easy. Just uses the tables in models.py
# 2. Fresh install using a git version just before this migration
#    The tables are all there, the unique constraint is also there.
#    This migration should do nothing.
#    But as we can't detect the uniqueconstraint easily,
#    this migration just adds the constraint again.
#    And possibly fails very loud. But ignores the failure.
# 3. old install, not using git, just releases.
#    This one will get the new tables in #4 (now with constraint!)
#    And this migration is just skipped silently.
# 4. old install, always on latest git.
#    This one has the tables, but lacks the constraint.
#    So this migration adds the constraint.
@RegisterMigration(7, MIGRATIONS)
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    # If migration 4 just ran in this same process, the tables were
    # created with the constraint already in place -- skip.
    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset it. Maybe the whole thing gets run again
        # For a different db?
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)

    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint('collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    try:
        constraint.create()
    except ProgrammingError:
        # User probably has an install that was run since the
        # collection tables were added, so we don't need to run this migration.
        pass

    db_conn.commit()
201
202
@RegisterMigration(8, MIGRATIONS)
def add_license_preference(db):
    """Add the User.license_preference column."""
    metadata = MetaData(bind=db.bind)

    users = inspect_table(metadata, 'core__users')

    license_col = Column('license_preference', Unicode)
    license_col.create(users)
    db.commit()
212
213
@RegisterMigration(9, MIGRATIONS)
def mediaentry_new_slug_era(db):
    """
    Update for the new era for media type slugs.

    Entries without slugs now display differently in the url like:
      /u/cwebber/m/id=251/

    ... because of this, we should back-convert:
     - entries without slugs should be converted to use the id, if possible, to
       make old urls still work
     - slugs with = (or also : which is now also not allowed) to have those
       stripped out (small possibility of breakage here sadly)
    """

    # NOTE: both helpers close over media_table, which is bound further
    # down before they are first called.
    def slug_and_user_combo_exists(slug, uploader):
        # True if this uploader already has an entry using this slug.
        return db.execute(
            media_table.select(
                and_(media_table.c.uploader==uploader,
                     media_table.c.slug==slug))).first() is not None

    def append_garbage_till_unique(row, new_slug):
        """
        Attach junk to this row until it's unique, then save it
        """
        if slug_and_user_combo_exists(new_slug, row.uploader):
            # okay, still no success;
            # let's whack junk on there till it's unique.
            new_slug += '-' + uuid.uuid4().hex[:4]
            # keep going if necessary!
            while slug_and_user_combo_exists(new_slug, row.uploader):
                new_slug += uuid.uuid4().hex[:4]

        db.execute(
            media_table.update(). \
            where(media_table.c.id==row.id). \
            values(slug=new_slug))

    metadata = MetaData(bind=db.bind)

    media_table = inspect_table(metadata, 'core__media_entries')

    for row in db.execute(media_table.select()):
        # no slug, try setting to an id
        if not row.slug:
            append_garbage_till_unique(row, six.text_type(row.id))
        # has "=" or ":" in it... we're getting rid of those
        elif u"=" in row.slug or u":" in row.slug:
            append_garbage_till_unique(
                row, row.slug.replace(u"=", u"-").replace(u":", u"-"))

    db.commit()
266
267
@RegisterMigration(10, MIGRATIONS)
def unique_collections_slug(db):
    """Add unique constraint to collection slug.

    First de-duplicates any (creator, slug) collisions by assigning the
    colliding rows a fresh UUID slug, then creates the constraint.

    Improvements over the original: uses ``dict.setdefault`` with a set
    (O(1) membership) instead of per-creator lists with the
    non-idiomatic ``not x in d`` test.
    """
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    existing_slugs = {}   # creator id -> set of slugs seen so far
    slugs_to_change = []  # ids of rows whose slug collides

    for row in db.execute(collection_table.select()):
        # if duplicate slug, remember the row so we can regenerate it
        seen = existing_slugs.setdefault(row.creator, set())
        if row.slug in seen:
            slugs_to_change.append(row.id)
        else:
            seen.add(row.slug)

    for row_id in slugs_to_change:
        new_slug = six.text_type(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))
    # sqlite does not like to change the schema when a transaction(update) is
    # not yet completed
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
        name='core__collection_creator_slug_key',
        table=collection_table)
    constraint.create()

    db.commit()
302
@RegisterMigration(11, MIGRATIONS)
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    # Drop each obsolete verification/token column in turn.
    for column_name in ('verification_key',
                        'fp_verification_key',
                        'fp_token_expire'):
        user_table.columns[column_name].drop()

    db.commit()
321
322
class CommentSubscription_v0(declarative_base()):
    """Snapshot of the CommentSubscription model as of migration 12.

    Records which users want to be notified about comments on a media
    entry, and by which channel.
    """
    __tablename__ = 'core__comment_subscriptions'
    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)

    # In-app notification vs. email notification toggles.
    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)
335
336
class Notification_v0(declarative_base()):
    """Snapshot of the base Notification model as of migration 12.

    Subclassed (joined-table) by CommentNotification_v0 and
    ProcessingNotification_v0 below; 'type' discriminates the subclass.
    """
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
347
348
class CommentNotification_v0(Notification_v0):
    """Notification about a comment (subject is a MediaComment)."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
354
355
class ProcessingNotification_v0(Notification_v0):
    """Notification about media processing (subject is a MediaEntry)."""
    __tablename__ = 'core__processing_notifications'

    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
362
363
@RegisterMigration(12, MIGRATIONS)
def add_new_notification_tables(db):
    """Create the comment-subscription and notification tables.

    The original body also reflected the users/media_entries/
    media_comments tables into a local MetaData but never used them;
    those dead lookups have been removed -- table creation below uses
    each model's own declarative metadata via ``__table__.create``.
    """
    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)

    db.commit()
379
380
@RegisterMigration(13, MIGRATIONS)
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        constraint = UniqueConstraint('username', table=user_table)
        constraint.create()

    db.commit()
396
397
# oauth1 migrations
class Client_v0(declarative_base()):
    """
    Model representing a client - Used for API Auth

    Snapshot as of migration 14.
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    # NOTE(review): "expirey" is a typo but is the actual column name in
    # deployed schemas; it must not be renamed here.
    expirey = Column(DateTime, nullable=True)
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # optional stuff
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    def __repr__(self):
        # Include the application name when one was registered.
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
423
class RequestToken_v0(declarative_base()):
    """
    Model for representing the request tokens

    Snapshot as of migration 14 (OAuth1 temporary credentials).
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client_v0.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    # "oob" = out-of-band callback, per the OAuth1 spec.
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
440
class AccessToken_v0(declarative_base()):
    """
    Model for representing the access tokens

    Snapshot as of migration 14 (OAuth1 token credentials).
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
453
454
class NonceTimestamp_v0(declarative_base()):
    """
    A place the timestamp and nonce can be stored - this is for OAuth1

    Snapshot as of migration 14; (nonce, timestamp) is the composite key.
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
463
464
@RegisterMigration(14, MIGRATIONS)
def create_oauth1_tables(db):
    """ Creates the OAuth1 tables """

    # Create the tables in dependency order (clients before tokens).
    for model in (Client_v0, RequestToken_v0,
                  AccessToken_v0, NonceTimestamp_v0):
        model.__table__.create(db.bind)

    db.commit()
475
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    users = inspect_table(metadata, "core__users")

    wants_col = Column('wants_notifications', Boolean, default=True)
    wants_col.create(users)

    db.commit()
484
485
486
@RegisterMigration(16, MIGRATIONS)
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    users = inspect_table(metadata, 'core__users')
    media_entries = inspect_table(metadata, 'core__media_entries')

    # Total uploaded so far, and the per-user cap.
    Column('uploaded', Integer, default=0).create(users)
    Column('upload_limit', Integer).create(users)

    # Size of each individual media entry's file.
    Column('file_size', Integer, default=0).create(media_entries)

    db.commit()
505
506
@RegisterMigration(17, MIGRATIONS)
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_files = inspect_table(metadata, "core__mediafiles")

    # JSON-encoded dict, mutation-tracked so in-place edits are persisted.
    metadata_col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    metadata_col.create(media_files)

    db.commit()
517
518 ###################
519 # Moderation tables
520 ###################
521
class ReportBase_v0(declarative_base()):
    """Snapshot of the base Report model as of migration 18.

    Joined-table-inheritance base for CommentReport_v0 / MediaReport_v0;
    the 'type' column discriminates the subclass.
    """
    __tablename__ = 'core__reports'
    id = Column(Integer, primary_key=True)
    reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
    report_content = Column(UnicodeText)
    reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    discriminator = Column('type', Unicode(50))
    # Moderator who resolved the report, when/if it was resolved.
    resolver_id = Column(Integer, ForeignKey(User.id))
    resolved = Column(DateTime)
    result = Column(UnicodeText)
    __mapper_args__ = {'polymorphic_on': discriminator}
534
535
class CommentReport_v0(ReportBase_v0):
    """Report filed against a comment (migration 18 snapshot)."""
    __tablename__ = 'core__reports_on_comments'
    __mapper_args__ = {'polymorphic_identity': 'comment_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
543
544
class MediaReport_v0(ReportBase_v0):
    """Report filed against a media entry (migration 18 snapshot)."""
    __tablename__ = 'core__reports_on_media'
    __mapper_args__ = {'polymorphic_identity': 'media_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'), primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
551
552
class UserBan_v0(declarative_base()):
    """Ban record for a user (migration 18 snapshot)."""
    __tablename__ = 'core__user_bans'
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     primary_key=True)
    # NULL expiration presumably means an indefinite ban -- verify
    # against the moderation code before relying on this.
    expiration_date = Column(Date)
    reason = Column(UnicodeText, nullable=False)
559
560
class Privilege_v0(declarative_base()):
    """A named privilege, e.g. 'admin' or 'commenter' (migration 18)."""
    __tablename__ = 'core__privileges'
    id = Column(Integer, nullable=False, primary_key=True, unique=True)
    privilege_name = Column(Unicode, nullable=False, unique=True)
565
566
class PrivilegeUserAssociation_v0(declarative_base()):
    """Join table linking users to privileges (migration 18 snapshot).

    NOTE(review): the foreign keys here are swapped -- the column named
    'core__privilege_id' references User.id and 'core__user_id'
    references Privilege.id. This is the historical, buggy schema;
    migration 21 (fix_privilege_user_association_table) exists precisely
    to repair it, so it must NOT be "corrected" here.
    """
    __tablename__ = 'core__privileges_users'
    privilege_id = Column(
        'core__privilege_id',
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    user_id = Column(
        'core__user_id',
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
579
580
# Default privilege rows seeded by migration 18; mirrors what the
# FOUNDATIONS system would create on a fresh install.
PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
                            {'privilege_name':u'moderator'},
                            {'privilege_name':u'uploader'},
                            {'privilege_name':u'reporter'},
                            {'privilege_name':u'commenter'},
                            {'privilege_name':u'active'}]
587
588 # vR1 stands for "version Rename 1". This only exists because we need
589 # to deal with dropping some booleans and it's otherwise impossible
590 # with sqlite.
591
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.
class User_vR1(declarative_base()):
    """Replacement User table used by the sqlite table-swap hack.

    Created under a temporary name, filled from core__users, then
    renamed over it (see replace_table_hack in migrations 18 and 23).
    """
    __tablename__ = 'rename__users'
    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    url = Column(Unicode)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
606
607
@RegisterMigration(18, MIGRATIONS)
def create_moderation_tables(db):
    """Create the moderation tables and port is_admin/status to privileges.

    Creates the report, ban and privilege tables, seeds the foundation
    privileges, grants each existing user privileges according to the old
    is_admin/status columns, then drops those columns from User.
    """

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table= inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status==u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status!=u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name==privilege_name)).first()['id']
        for privilege_name in
            [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
    ]

    # Give each user the appopriate privileges depending whether they are an
    # admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # NOTE(review): the insert below writes the user id into
    # 'core__privilege_id' and vice versa -- this matches the swapped
    # foreign keys in PrivilegeUserAssociation_v0 above, which migration
    # 21 later repairs. Do not "fix" one without the other.
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                            reporter_privilege_id, commenter_privilege_id,
                            active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                            commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~

        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
716
717
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)

    media_entries = inspect_table(metadata, 'core__media_entries')
    media_entries.columns['collected'].drop()

    db.commit()
731
732
@RegisterMigration(20, MIGRATIONS)
def add_metadata_column(db):
    """Add the MediaEntry.media_metadata JSON column."""
    metadata = MetaData(bind=db.bind)

    entries = inspect_table(metadata, 'core__media_entries')

    # Mutation-tracked JSON dict so in-place edits get persisted.
    metadata_col = Column('media_metadata',
                          MutationDict.as_mutable(JSONEncoded),
                          default=MutationDict())
    metadata_col.create(entries)

    db.commit()
744
745
class PrivilegeUserAssociation_R1(declarative_base()):
    """Corrected privilege/user join table used by migration 21.

    Created under a temporary name, filled with the (swapped-back) data
    from core__privileges_users, then renamed over it. Unlike the _v0
    version, the foreign keys here point at the right tables.
    """
    __tablename__ = 'rename__privileges_users'
    user = Column(
        "user",
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    privilege = Column(
        "privilege",
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
758
@RegisterMigration(21, MIGRATIONS)
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        # sqlite can't rename columns in place, so build the corrected
        # table, copy the rows across (un-swapping them), and rename it.
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        # Only the names were wrong, the data itself lines up: rename
        # each column to what it actually contains.
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
797
798
@RegisterMigration(22, MIGRATIONS)
def add_index_username_field(db):
    """
    This migration has been found to be doing the wrong thing.  See
    the documentation in migration 23 (revert_username_index) below
    which undoes this for those databases that did run this migration.

    Old description:
      This indexes the User.username field which is frequently queried
      for example a user logging in. This solves the issue #894

    This function is intentionally a no-op: the migration number must
    remain registered so version counters stay consistent.
    """
    ## This code is left commented out *on purpose!*
    ##
    ## We do not normally allow commented out code like this in
    ## MediaGoblin but this is a special case: since this migration has
    ## been nullified but with great work to set things back below,
    ## this is commented out for historical clarity.
    #
    # metadata = MetaData(bind=db.bind)
    # user_table = inspect_table(metadata, "core__users")
    #
    # new_index = Index("ix_core__users_uploader", user_table.c.username)
    # new_index.create()
    #
    # db.commit()
    pass
825
826
@RegisterMigration(23, MIGRATIONS)
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (ie, either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)

    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

    db.commit()
899
class Generator_R0(declarative_base()):
    """Snapshot of the Generator model as of migration 24.

    Describes what produced an activity (e.g. the GMG server itself).
    """
    __tablename__ = "core__generators"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    object_type = Column(Unicode, nullable=False)
907
class ActivityIntermediator_R0(declarative_base()):
    """Snapshot of the ActivityIntermediator model as of migration 24.

    Indirection row letting an Activity point at any of several object
    types; 'type' selects which table the target lives in.
    """
    __tablename__ = "core__activity_intermediators"
    id = Column(Integer, primary_key=True)
    type = Column(Unicode, nullable=False)

    # These are needed for migration 29
    TYPES = {
        "user": User,
        "media": MediaEntry,
        "comment": MediaComment,
        "collection": Collection,
    }
920
class Activity_R0(declarative_base()):
    """Frozen snapshot of the Activity model, as created by migration 24."""
    __tablename__ = "core__activities"
    id = Column(Integer, primary_key=True)
    # The user who performed the activity
    actor = Column(Integer, ForeignKey(User.id), nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # e.g. "create" or "comment" (the values migration 24 writes)
    verb = Column(Unicode, nullable=False)
    content = Column(Unicode, nullable=True)
    title = Column(Unicode, nullable=True)
    generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
    # object is required, target optional; both reference an
    # ActivityIntermediator row (replaced by GenericModelReference in 27-31).
    object = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=False)
    target = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=True)
937
938
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
    """
    Creates everything to create activities in GMG
    - Adds Activity, ActivityIntermediator and Generator table
    - Creates GMG service generator for activities produced by the server
    - Adds the activity_as_object and activity_as_target to objects/targets
    - Retroactively adds activities for what we can accurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Initiate the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")


    # Create the foundations for Generator: a single "service" generator
    # representing the server itself, used for all retroactive activities.
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator (re-read because insert() alone doesn't
    # hand back the full row)
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()


    # Now we want to modify the tables which MAY have an activity at some point
    # by giving each a nullable FK to the intermediator table.
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()


    # Now we want to retroactively add what activities we can
    # first we'll add activities when people uploaded media.
    # these can't have content as it's not feasible to get the
    # correct content strings.
    for media in db.execute(media_entry_table.select()):
        # Now we want to create the intermediator, then re-read the row we
        # just inserted so we have its id available.
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add the activity ("create" with upload time as published/updated)
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
            )).first()

        # Create an AI for target (the media's AI, set in the loop above)
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        # NOTE(review): this writes the *media's* AI into the comment table
        # and filters comments by the media entry's id, not the comment's.
        # Looks like a bug (should probably be comment.id) and is in any
        # case overwritten by the update further down -- confirm before
        # relying on this behaviour.
        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
            ).where(media_comments_table.c.id==media_entry.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment"
        )).inserted_primary_key[0]

        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # create AI, then re-read the inserted row for its id
        db_ai = db.execute(ai_table.insert().values(
            type="collection"
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now add link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }

        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        # NOTE(review): duplicates the identical update a few lines above;
        # harmless but redundant.
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()
1100
class Location_V0(declarative_base()):
    """Frozen snapshot of the Location model introduced by migration 25."""
    __tablename__ = "core__locations"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    # JSON-encoded dicts; MutationDict lets SQLAlchemy notice in-place edits
    position = Column(MutationDict.as_mutable(JSONEncoded))
    address = Column(MutationDict.as_mutable(JSONEncoded))
1107
@RegisterMigration(25, MIGRATIONS)
def add_location_model(db):
    """Create the Location table and link it from the models that use it."""
    metadata = MetaData(bind=db.bind)

    # Bring the new location table into existence first.
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Attach a nullable "location" foreign key to each model that can carry
    # one.  A fresh Column object is required for every table it is added to.
    for table_name in ("core__users", "core__collections",
                       "core__media_entries", "core__media_comments"):
        target_table = inspect_table(metadata, table_name)
        location_column = Column("location", Integer,
                                 ForeignKey(Location_V0.id))
        location_column.create(target_table)

    db.commit()
1137
@RegisterMigration(26, MIGRATIONS)
def datetime_to_utc(db):
    """Convert all stored naive timestamps from server-local time to UTC."""
    # The database was populated with naive datetimes in the server's zone.
    server_timezone = dateutil.tz.tzlocal()

    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Attach the server's zone, then express the same instant in UTC.
        dt = dt.replace(tzinfo=server_timezone)
        return dt.astimezone(pytz.UTC)

    # (table name, primary-key column, timestamp columns to convert),
    # covering every model that stores a datetime.
    conversions = [
        ("core__users", "id", ("created",)),
        ("core__clients", "id", ("created", "updated")),
        ("core__request_tokens", "token", ("created", "updated")),
        ("core__access_tokens", "token", ("created", "updated")),
        ("core__media_entries", "id", ("created",)),
        ("core__attachment_files", "id", ("created",)),
        ("core__media_comments", "id", ("created",)),
        ("core__collections", "id", ("created",)),
        ("core__collection_items", "id", ("added",)),
        ("core__comment_subscriptions", "id", ("created",)),
        ("core__notifications", "id", ("created",)),
        ("core__reports", "id", ("created",)),
        ("core__generators", "id", ("published", "updated")),
        ("core__activities", "id", ("published", "updated")),
    ]

    for table_name, pk_name, timestamp_columns in conversions:
        table = inspect_table(metadata, table_name)
        pk_column = table.c[pk_name]
        for row in db.execute(table.select()):
            new_values = {column: dt_to_utc(getattr(row, column))
                          for column in timestamp_columns}
            db.execute(table.update().values(**new_values).where(
                pk_column == getattr(row, pk_name)))

    # Commit this to the database
    db.commit()
1261
1262 ##
1263 # Migrations to handle migrating from activity specific foreign key to the
1264 # new GenericForeignKey implementations. They have been split up to improve
1265 # readability and minimise errors
1266 ##
1267
class GenericModelReference_V0(declarative_base()):
    """Frozen snapshot of the GenericModelReference model (migration 27).

    A (model_type, obj_pk) pair identifying a row in an arbitrary table;
    this is what GenericForeignKey columns point at.
    """
    __tablename__ = "core__generic_model_reference"

    id = Column(Integer, primary_key=True)
    # Primary key of the referenced row
    obj_pk = Column(Integer, nullable=False)
    # Table name of the referenced model (migration 29 stores __tablename__)
    model_type = Column(Unicode, nullable=False)
1274
@RegisterMigration(27, MIGRATIONS)
def create_generic_model_reference(db):
    """Create the GenericModelReference table backing GenericForeignKey."""
    engine = db.bind
    GenericModelReference_V0.__table__.create(engine)
    db.commit()
1280
@RegisterMigration(28, MIGRATIONS)
def add_foreign_key_fields(db):
    """
    Add the GenericForeignKey columns to Activity under temporary names.

    They are populated by the data migration in 29 and renamed to their
    permanent names in 30.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Create both temporary columns and attach them to the table.
    for temp_name in ("temp_object", "temp_target"):
        temp_column = Column(temp_name, Integer, GenericForeignKey())
        temp_column.create(activity_table)

    # Commit this to the database
    db.commit()
1300
@RegisterMigration(29, MIGRATIONS)
def migrate_data_foreign_keys(db):
    """
    Migrate data from the old object/target attributes (which point at
    ActivityIntermediator rows) into the temporary GenericForeignKey
    columns added by migration 28.

    For each activity the intermediator is resolved to the real row
    (user/media/comment/collection), a GenericModelReference is created
    for it, and that reference's id is stored in temp_object/temp_target.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")


    # Iterate through all activities doing the migration per activity.
    for activity in db.execute(activity_table.select()):
        # First do the "Activity.object" migration to "Activity.temp_object".
        # We can't use the old Activity.get_object helper inside a migration,
        # so resolve the intermediator row by hand.
        object_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.object
        )).first()

        object_ai_type = ActivityIntermediator_R0.TYPES[object_ai.type]
        object_ai_table = inspect_table(metadata, object_ai_type.__tablename__)

        activity_object = db.execute(object_ai_table.select(
            object_ai_table.c.activity==object_ai.id
        )).first()

        # now we need to create the GenericModelReference
        object_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_object.id,
            model_type=object_ai_type.__tablename__
        ))

        # Point this activity's temp_object at the new reference.
        # Bug fix: the update must be scoped to this activity -- without the
        # WHERE clause every row was overwritten on each iteration.
        db.execute(activity_table.update().values(
            temp_object=object_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

        # Now do same process for "Activity.target" to "Activity.temp_target";
        # not all Activities have a target so if it doesn't just skip the rest
        # of this.
        if activity.target is None:
            continue

        # Now get the target for the activity.
        target_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.target
        )).first()

        target_ai_type = ActivityIntermediator_R0.TYPES[target_ai.type]
        target_ai_table = inspect_table(metadata, target_ai_type.__tablename__)

        activity_target = db.execute(target_ai_table.select(
            target_ai_table.c.activity==target_ai.id
        )).first()

        # We now want to create the new target GenericModelReference
        target_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_target.id,
            model_type=target_ai_type.__tablename__
        ))

        # Bug fix: this previously wrote temp_object (clobbering the value
        # set above) and also lacked a WHERE clause; write temp_target for
        # this activity only.
        db.execute(activity_table.update().values(
            temp_target=target_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

    # Commit to the database.
    db.commit()
1372
@RegisterMigration(30, MIGRATIONS)
def rename_and_remove_object_and_target(db):
    """
    Drop the old Activity.object/Activity.target columns and rename the
    temporary GenericForeignKey columns into their permanent names.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # The old intermediator-based columns go first.
    for legacy_name in ("object", "target"):
        activity_table.columns[legacy_name].drop()

    # The temporary columns then take over under their final names.
    activity_table.columns["temp_object"].alter(name="object_id")
    activity_table.columns["temp_target"].alter(name="target_id")

    # Persist the schema changes.
    db.commit()
1400
@RegisterMigration(31, MIGRATIONS)
def remove_activityintermediator(db):
    """
    Drop the ActivityIntermediator table, now superseded by the
    GenericForeignKey field.
    """
    metadata = MetaData(bind=db.bind)

    # Locate the obsolete table and remove it from the schema.
    obsolete = inspect_table(metadata, "core__activity_intermediators")
    obsolete.drop()

    # Commit the changes
    db.commit()