# Fix #5344 - OAuth NotImplemented exception
# mediagoblin/db/migrations.py
1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import datetime
18 import uuid
19
20 import six
21
22 if six.PY2:
23 import migrate
24
25 import pytz
26 import dateutil.tz
27 from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
28 Integer, Unicode, UnicodeText, DateTime,
29 ForeignKey, Date, Index)
30 from sqlalchemy.exc import ProgrammingError
31 from sqlalchemy.ext.declarative import declarative_base
32 from sqlalchemy.sql import and_
33 from sqlalchemy.schema import UniqueConstraint
34
35 from mediagoblin import oauth
36 from mediagoblin.tools import crypto
37 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
38 from mediagoblin.db.migration_tools import (
39 RegisterMigration, inspect_table, replace_table_hack)
40 from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
41 Privilege, Generator, LocalUser, Location)
42 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
43
44
45 MIGRATIONS = {}
46
47
@RegisterMigration(1, MIGRATIONS)
def ogg_to_webm_audio(db_conn):
    """Rename the 'ogg' media file keyname to 'webm_audio'."""
    metadata = MetaData(bind=db_conn.bind)

    keynames = Table('core__file_keynames', metadata, autoload=True,
                     autoload_with=db_conn.bind)

    rename = keynames.update().where(
        keynames.c.name == 'ogg').values(name='webm_audio')
    db_conn.execute(rename)
    db_conn.commit()
60
61
@RegisterMigration(2, MIGRATIONS)
def add_wants_notification_column(db_conn):
    """Add User.wants_comment_notification (Boolean, default True)."""
    metadata = MetaData(bind=db_conn.bind)

    user_table = Table('core__users', metadata, autoload=True,
                       autoload_with=db_conn.bind)

    new_col = Column('wants_comment_notification', Boolean,
                     default=True, nullable=True)
    new_col.create(user_table, populate_defaults=True)
    db_conn.commit()
73
74
@RegisterMigration(3, MIGRATIONS)
def add_transcoding_progress(db_conn):
    """Add MediaEntry.transcoding_progress column."""
    metadata = MetaData(bind=db_conn.bind)
    entries = inspect_table(metadata, 'core__media_entries')

    Column('transcoding_progress', SmallInteger).create(entries)
    db_conn.commit()
84
85
class Collection_v0(declarative_base()):
    """Snapshot of the Collection model as it existed at migration 4."""
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    # Owning user of this collection.
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    # Cached number of items in the collection.
    items = Column(Integer, default=0)
97
class CollectionItem_v0(declarative_base()):
    """Snapshot of the CollectionItem model as it existed at migration 4."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    ## This should be activated, normally.
    ## But this would change the way the next migration used to work.
    ## So it's commented for now.
    __table_args__ = (
        UniqueConstraint('collection', 'media_entry'),
        {})

# Flag shared between migrations 4 and 7: set True when migration 4 just
# created the tables (and therefore the unique constraint) in this run,
# so migration 7 knows it has nothing to do.
collectionitem_unique_constraint_done = False
117
@RegisterMigration(4, MIGRATIONS)
def add_collection_tables(db_conn):
    """Create the collection and collection-item tables."""
    for table in (Collection_v0.__table__, CollectionItem_v0.__table__):
        table.create(db_conn.bind)

    # Record for migration 7 that the unique constraint already exists.
    global collectionitem_unique_constraint_done
    collectionitem_unique_constraint_done = True

    db_conn.commit()
127
128
@RegisterMigration(5, MIGRATIONS)
def add_mediaentry_collected(db_conn):
    """Add MediaEntry.collected counter column (default 0)."""
    metadata = MetaData(bind=db_conn.bind)
    entries = inspect_table(metadata, 'core__media_entries')

    Column('collected', Integer, default=0).create(entries)
    db_conn.commit()
138
139
class ProcessingMetaData_v0(declarative_base()):
    """Snapshot of the ProcessingMetaData model as of migration 6."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    # URL to notify when processing of the media entry finishes.
    callback_url = Column(Unicode)
147
@RegisterMigration(6, MIGRATIONS)
def create_processing_metadata_table(db):
    """Create the core__processing_metadata table."""
    ProcessingMetaData_v0.__table__.create(db.bind)
    db.commit()
152
153
154 # Okay, problem being:
155 # Migration #4 forgot to add the uniqueconstraint for the
156 # new tables. While creating the tables from scratch had
157 # the constraint enabled.
158 #
159 # So we have four situations that should end up at the same
160 # db layout:
161 #
162 # 1. Fresh install.
163 # Well, easy. Just uses the tables in models.py
164 # 2. Fresh install using a git version just before this migration
165 # The tables are all there, the unique constraint is also there.
166 # This migration should do nothing.
167 # But as we can't detect the uniqueconstraint easily,
168 # this migration just adds the constraint again.
169 # And possibly fails very loud. But ignores the failure.
170 # 3. old install, not using git, just releases.
171 # This one will get the new tables in #4 (now with constraint!)
172 # And this migration is just skipped silently.
173 # 4. old install, always on latest git.
174 # This one has the tables, but lacks the constraint.
175 # So this migration adds the constraint.
@RegisterMigration(7, MIGRATIONS)
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Migration 4 created the constraint during this very run.
        # Reset the flag in case the migrations run again for another db.
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)
    item_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint(
        'collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=item_table)

    try:
        constraint.create()
    except ProgrammingError:
        # The constraint already exists: the install was created after
        # the collection tables gained it, so nothing to do.
        pass

    db_conn.commit()
203
204
@RegisterMigration(8, MIGRATIONS)
def add_license_preference(db):
    """Add User.license_preference column."""
    metadata = MetaData(bind=db.bind)
    users = inspect_table(metadata, 'core__users')

    Column('license_preference', Unicode).create(users)
    db.commit()
214
215
@RegisterMigration(9, MIGRATIONS)
def mediaentry_new_slug_era(db):
    """
    Update for the new era for media type slugs.

    Entries without slugs now display differently in the url like:
      /u/cwebber/m/id=251/

    ... because of this, we should back-convert:
     - entries without slugs should be converted to use the id, if possible, to
       make old urls still work
     - slugs with = (or also : which is now also not allowed) to have those
       stripped out (small possibility of breakage here sadly)
    """

    def slug_and_user_combo_exists(slug, uploader):
        # True if this uploader already has an entry with this exact slug.
        return db.execute(
            media_table.select(
                and_(media_table.c.uploader==uploader,
                     media_table.c.slug==slug))).first() is not None

    def append_garbage_till_unique(row, new_slug):
        """
        Attach junk to this row until it's unique, then save it
        """
        if slug_and_user_combo_exists(new_slug, row.uploader):
            # okay, still no success;
            # let's whack junk on there till it's unique.
            new_slug += '-' + uuid.uuid4().hex[:4]
            # keep going if necessary!
            while slug_and_user_combo_exists(new_slug, row.uploader):
                new_slug += uuid.uuid4().hex[:4]

        db.execute(
            media_table.update(). \
            where(media_table.c.id==row.id). \
            values(slug=new_slug))

    metadata = MetaData(bind=db.bind)

    media_table = inspect_table(metadata, 'core__media_entries')

    for row in db.execute(media_table.select()):
        # no slug, try setting to an id
        if not row.slug:
            append_garbage_till_unique(row, six.text_type(row.id))
        # has "=" or ":" in it... we're getting rid of those
        elif u"=" in row.slug or u":" in row.slug:
            append_garbage_till_unique(
                row, row.slug.replace(u"=", u"-").replace(u":", u"-"))

    db.commit()
268
269
@RegisterMigration(10, MIGRATIONS)
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    # Maps creator id -> list of slugs seen so far for that creator.
    existing_slugs = {}
    slugs_to_change = []

    for row in db.execute(collection_table.select()):
        # if duplicate slug, generate a unique slug
        if row.creator in existing_slugs and row.slug in \
           existing_slugs[row.creator]:
            slugs_to_change.append(row.id)
        else:
            if not row.creator in existing_slugs:
                existing_slugs[row.creator] = [row.slug]
            else:
                existing_slugs[row.creator].append(row.slug)

    # Replace each duplicate slug with a random UUID-based slug.
    for row_id in slugs_to_change:
        new_slug = six.text_type(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))
    # sqlite does not like to change the schema when a transaction(update) is
    # not yet completed
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
                                  name='core__collection_creator_slug_key',
                                  table=collection_table)
    constraint.create()

    db.commit()
304
@RegisterMigration(11, MIGRATIONS)
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    for colname in ('verification_key',
                    'fp_verification_key',
                    'fp_token_expire'):
        user_table.columns[colname].drop()

    db.commit()
323
324
class CommentSubscription_v0(declarative_base()):
    """Snapshot of CommentSubscription as of migration 12."""
    __tablename__ = 'core__comment_subscriptions'
    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)

    # Whether the user wants in-site notifications / emails for comments.
    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)
337
338
class Notification_v0(declarative_base()):
    """Snapshot of the base Notification model as of migration 12."""
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    # Polymorphic discriminator for notification subclasses.
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # Recipient of the notification.
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
349
350
class CommentNotification_v0(Notification_v0):
    """Notification subtype pointing at a MediaComment (migration 12)."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
356
357
class ProcessingNotification_v0(Notification_v0):
    """Notification subtype pointing at a MediaEntry (migration 12)."""
    __tablename__ = 'core__processing_notifications'

    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
364
365
@RegisterMigration(12, MIGRATIONS)
def add_new_notification_tables(db):
    """Create the comment-subscription and notification tables.

    The original version also reflected the users, media_entries and
    media_comments tables into unused locals; that dead reflection work
    has been removed — table creation only needs the model snapshots.
    """
    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)

    db.commit()
381
382
@RegisterMigration(13, MIGRATIONS)
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        UniqueConstraint('username', table=user_table).create()

    db.commit()
398
399
400 # oauth1 migrations
# oauth1 migrations
class Client_v0(declarative_base()):
    """
    Model representing a client - Used for API Auth
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    # NOTE: 'expirey' is the historical (misspelled) column name; it must
    # stay spelled this way to match the schema this migration created.
    expirey = Column(DateTime, nullable=True)
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # optional stuff
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    def __repr__(self):
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
425
class RequestToken_v0(declarative_base()):
    """
    Model for representing the request tokens
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client_v0.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    # "oob" (out-of-band) is the OAuth1 convention for "no callback URL".
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
442
class AccessToken_v0(declarative_base()):
    """
    Model for representing the access tokens
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    # Request token that was exchanged for this access token.
    request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
455
456
class NonceTimestamp_v0(declarative_base()):
    """
    A place the timestamp and nonce can be stored - this is for OAuth1
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
465
466
@RegisterMigration(14, MIGRATIONS)
def create_oauth1_tables(db):
    """ Creates the OAuth1 tables """
    for model in (Client_v0, RequestToken_v0,
                  AccessToken_v0, NonceTimestamp_v0):
        model.__table__.create(db.bind)

    db.commit()
477
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    users = inspect_table(metadata, "core__users")

    Column('wants_notifications', Boolean, default=True).create(users)
    db.commit()
486
487
488
@RegisterMigration(16, MIGRATIONS)
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    users = inspect_table(metadata, 'core__users')
    entries = inspect_table(metadata, 'core__media_entries')

    # Running total of bytes uploaded, and the per-user cap.
    Column('uploaded', Integer, default=0).create(users)
    Column('upload_limit', Integer).create(users)
    # Size of each individual media entry.
    Column('file_size', Integer, default=0).create(entries)

    db.commit()
507
508
@RegisterMigration(17, MIGRATIONS)
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    files_table = inspect_table(metadata, "core__mediafiles")

    new_col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    new_col.create(files_table)

    db.commit()
519
520 ###################
521 # Moderation tables
522 ###################
523
class ReportBase_v0(declarative_base()):
    """Polymorphic base for user reports (migration 18 snapshot)."""
    __tablename__ = 'core__reports'
    id = Column(Integer, primary_key=True)
    reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
    report_content = Column(UnicodeText)
    reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # Stored in the 'type' column; selects the report subclass.
    discriminator = Column('type', Unicode(50))
    # Moderator who resolved the report, and the outcome.
    resolver_id = Column(Integer, ForeignKey(User.id))
    resolved = Column(DateTime)
    result = Column(UnicodeText)
    __mapper_args__ = {'polymorphic_on': discriminator}
536
537
class CommentReport_v0(ReportBase_v0):
    """Report filed against a comment (migration 18 snapshot)."""
    __tablename__ = 'core__reports_on_comments'
    __mapper_args__ = {'polymorphic_identity': 'comment_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
545
546
class MediaReport_v0(ReportBase_v0):
    """Report filed against a media entry (migration 18 snapshot)."""
    __tablename__ = 'core__reports_on_media'
    __mapper_args__ = {'polymorphic_identity': 'media_report'}

    id = Column('id',Integer, ForeignKey('core__reports.id'), primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
553
554
class UserBan_v0(declarative_base()):
    """Ban record for a user (migration 18 snapshot)."""
    __tablename__ = 'core__user_bans'
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     primary_key=True)
    # NULL expiration means the ban is indefinite -- TODO confirm with callers.
    expiration_date = Column(Date)
    reason = Column(UnicodeText, nullable=False)
561
562
class Privilege_v0(declarative_base()):
    """A named privilege, e.g. 'admin' or 'uploader' (migration 18)."""
    __tablename__ = 'core__privileges'
    id = Column(Integer, nullable=False, primary_key=True, unique=True)
    privilege_name = Column(Unicode, nullable=False, unique=True)
567
568
class PrivilegeUserAssociation_v0(declarative_base()):
    """User <-> Privilege association table as created by migration 18.

    NOTE: the foreign keys here are crossed on purpose-for-history:
    'core__privilege_id' references User.id and 'core__user_id' references
    Privilege.id. This known defect is exactly what migration 21
    (fix_privilege_user_association_table) repairs; do not "fix" it here
    or that migration breaks.
    """
    __tablename__ = 'core__privileges_users'
    privilege_id = Column(
        'core__privilege_id',
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    user_id = Column(
        'core__user_id',
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
581
582
# Default privileges seeded by migration 18; mirrors what the FOUNDATIONS
# system would create on a fresh install.
PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
                            {'privilege_name':u'moderator'},
                            {'privilege_name':u'uploader'},
                            {'privilege_name':u'reporter'},
                            {'privilege_name':u'commenter'},
                            {'privilege_name':u'active'}]
589
590 # vR1 stands for "version Rename 1". This only exists because we need
591 # to deal with dropping some booleans and it's otherwise impossible
592 # with sqlite.
593
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.

class User_vR1(declarative_base()):
    """User table without the dropped boolean columns, used as the
    replacement table in the sqlite table-swap hack (migrations 18/23)."""
    __tablename__ = 'rename__users'
    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    url = Column(Unicode)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
608
609
@RegisterMigration(18, MIGRATIONS)
def create_moderation_tables(db):
    """Create the moderation/privilege tables and port users onto them.

    Converts the old User.is_admin/status columns into privilege rows,
    then drops the vestigial columns (via a table swap on sqlite).
    """

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table= inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status==u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status!=u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name==privilege_name)).first()['id']
        for privilege_name in
        [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
    ]

    # Give each user the appopriate privileges depending whether they are an
    # admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    #
    # NOTE: the keyword/value pairing below looks inverted (user id passed
    # as core__privilege_id and vice versa) -- this matches the crossed
    # column naming of PrivilegeUserAssociation_v0, which migration 21
    # later straightens out. Leave as-is.
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                             reporter_privilege_id, commenter_privilege_id,
                             active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~

        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
718
719
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)
    entries = inspect_table(metadata, 'core__media_entries')

    entries.columns['collected'].drop()
    db.commit()
733
734
@RegisterMigration(20, MIGRATIONS)
def add_metadata_column(db):
    """Add MediaEntry.media_metadata mutable-JSON column."""
    metadata = MetaData(bind=db.bind)
    entries = inspect_table(metadata, 'core__media_entries')

    new_col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
                     default=MutationDict())
    new_col.create(entries)

    db.commit()
746
747
class PrivilegeUserAssociation_R1(declarative_base()):
    """Corrected user/privilege association table: used by migration 21
    as the sqlite replacement table with properly named & referenced
    columns ('user' -> User.id, 'privilege' -> Privilege.id)."""
    __tablename__ = 'rename__privileges_users'
    user = Column(
        "user",
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    privilege = Column(
        "privilege",
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
760
@RegisterMigration(21, MIGRATIONS)
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        # sqlite can't rename/retarget columns, so build the corrected
        # table, copy rows across (swapping the crossed columns), then
        # drop the old table and rename the new one into place.
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        # The renames are crossed on purpose: core__user_id actually held
        # privilege ids and core__privilege_id held user ids (see migration 18).
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
799
800
@RegisterMigration(22, MIGRATIONS)
def add_index_username_field(db):
    """
    This migration has been found to be doing the wrong thing. See
    the documentation in migration 23 (revert_username_index) below
    which undoes this for those databases that did run this migration.

    Old description:
        This indexes the User.username field which is frequently queried
        for example a user logging in. This solves the issue #894
    """
    ## This code is left commented out *on purpose!*
    ##
    ## We do not normally allow commented out code like this in
    ## MediaGoblin but this is a special case: since this migration has
    ## been nullified but with great work to set things back below,
    ## this is commented out for historical clarity.
    #
    # metadata = MetaData(bind=db.bind)
    # user_table = inspect_table(metadata, "core__users")
    #
    # new_index = Index("ix_core__users_uploader", user_table.c.username)
    # new_index.create()
    #
    # db.commit()
    pass
827
828
@RegisterMigration(23, MIGRATIONS)
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    # Map index name -> Index object for the user table.
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (ie, either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)

    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

    db.commit()
901
class Generator_R0(declarative_base()):
    """Snapshot of the Generator model (activity producer) for migration 24."""
    __tablename__ = "core__generators"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    object_type = Column(Unicode, nullable=False)
909
class ActivityIntermediator_R0(declarative_base()):
    """Indirection row linking an Activity to its object/target model."""
    __tablename__ = "core__activity_intermediators"
    id = Column(Integer, primary_key=True)
    # Which model the intermediator points at; see TYPES below.
    type = Column(Unicode, nullable=False)

    # These are needed for migration 29
    TYPES = {
        "user": User,
        "media": MediaEntry,
        "comment": MediaComment,
        "collection": Collection,
    }
922
class Activity_R0(declarative_base()):
    """Snapshot of the Activity model as created by migration 24."""
    __tablename__ = "core__activities"
    id = Column(Integer, primary_key=True)
    # The user who performed the activity.
    actor = Column(Integer, ForeignKey(User.id), nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    verb = Column(Unicode, nullable=False)
    content = Column(Unicode, nullable=True)
    title = Column(Unicode, nullable=True)
    generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
    # "object" and "target" point at ActivityIntermediator rows; they are
    # replaced by GenericModelReference ids in migrations 28-30.
    object = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=False)
    target = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=True)
939
940
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
    """
    Creates everything to create activities in GMG
    - Adds Activity, ActivityIntermediator and Generator table
    - Creates GMG service generator for activities produced by the server
    - Adds the activity_as_object and activity_as_target to objects/targets
    - Retroactively adds activities for what we can accurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Initiate the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")


    # Create the server's own Generator; activities created retroactively
    # below are attributed to it.
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()


    # Now we want to modify the tables which MAY have an activity at some point
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()


    # Now we want to retroactively add what activities we can.
    # First we'll add activities for when people uploaded media; these
    # can't have content as it's not feasible to reconstruct the correct
    # content strings.
    for media in db.execute(media_entry_table.select()):
        # Create the intermediator row for the media entry.
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add the activity
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
            )).first()

        # Look up the media's AI; it becomes the activity's "target" below.
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        # NOTE(review): this writes the *media's* AI id into the comment's
        # "activity" column and filters on media_entry.id rather than
        # comment.id — it looks like a bug, and the value is overwritten
        # with db_ai_comment a few statements below anyway. Left as-is
        # because this migration has already shipped; confirm before reuse.
        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
            ).where(media_comments_table.c.id==media_entry.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment"
        )).inserted_primary_key[0]

        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # create AI
        db_ai = db.execute(ai_table.insert().values(
            type="collection"
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }

        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        # NOTE(review): this repeats the identical update performed just
        # above; redundant but harmless.
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()
1102
class Location_V0(declarative_base()):
    """Snapshot of the Location model as created by migration 25."""
    __tablename__ = "core__locations"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    # Stored as JSON-encoded dicts (see mediagoblin.db.extratypes).
    position = Column(MutationDict.as_mutable(JSONEncoded))
    address = Column(MutationDict.as_mutable(JSONEncoded))
1109
@RegisterMigration(25, MIGRATIONS)
def add_location_model(db):
    """Create the Location table and link it from the other models."""
    metadata = MetaData(bind=db.bind)

    # Create the location table itself.
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Every model gaining location support gets a nullable foreign key
    # column pointing at the new table.
    for table_name in ("core__users", "core__collections",
                       "core__media_entries", "core__media_comments"):
        table = inspect_table(metadata, table_name)
        location_column = Column("location", Integer,
                                 ForeignKey(Location_V0.id))
        location_column.create(table)

    db.commit()
1139
@RegisterMigration(26, MIGRATIONS)
def datetime_to_utc(db):
    """ Convert datetime stamps to UTC """
    # The naive timestamps in the database were recorded in the server's
    # local timezone, so that's the zone we attach before rebasing to UTC.
    server_timezone = dateutil.tz.tzlocal()

    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Stamp the naive datetime with the server zone, then convert.
        dt = dt.replace(tzinfo=server_timezone)
        return dt.astimezone(pytz.UTC)

    # (table name, primary key column, timestamp columns to convert),
    # processed in this exact order.
    conversions = [
        ("core__users", "id", ("created",)),
        ("core__clients", "id", ("created", "updated")),
        ("core__request_tokens", "token", ("created", "updated")),
        ("core__access_tokens", "token", ("created", "updated")),
        ("core__media_entries", "id", ("created",)),
        ("core__attachment_files", "id", ("created",)),
        ("core__media_comments", "id", ("created",)),
        ("core__collections", "id", ("created",)),
        ("core__collection_items", "id", ("added",)),
        ("core__comment_subscriptions", "id", ("created",)),
        ("core__notifications", "id", ("created",)),
        ("core__reports", "id", ("created",)),
        ("core__generators", "id", ("published", "updated")),
        ("core__activities", "id", ("published", "updated")),
    ]

    for table_name, pk_name, timestamp_columns in conversions:
        table = inspect_table(metadata, table_name)
        pk_column = table.c[pk_name]
        for row in db.execute(table.select()):
            converted = dict(
                (column, dt_to_utc(getattr(row, column)))
                for column in timestamp_columns)
            db.execute(table.update().values(**converted).where(
                pk_column==getattr(row, pk_name)))

    # Commit this to the database
    db.commit()
1263
1264 ##
1265 # Migrations to handle migrating from activity specific foreign key to the
1266 # new GenericForeignKey implementations. They have been split up to improve
1267 # readability and minimise errors
1268 ##
1269
class GenericModelReference_V0(declarative_base()):
    """Snapshot of the GenericModelReference model for migrations 27-30."""
    __tablename__ = "core__generic_model_reference"

    id = Column(Integer, primary_key=True)
    # Primary key of the row being referenced.
    obj_pk = Column(Integer, nullable=False)
    # __tablename__ of the model the reference points at.
    model_type = Column(Unicode, nullable=False)
1276
@RegisterMigration(27, MIGRATIONS)
def create_generic_model_reference(db):
    """ Add the GenericModelReference table to the database """
    gmr_table = GenericModelReference_V0.__table__
    gmr_table.create(db.bind)
    db.commit()
1282
@RegisterMigration(28, MIGRATIONS)
def add_foreign_key_fields(db):
    """
    Add the GenericForeignKey fields to Activity under temporary names.

    Migration 29 copies the data into them and migration 30 renames them
    to their final names.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Both temporary columns point at the new GenericModelReference table.
    for column_name in ("temp_object", "temp_target"):
        fk_column = Column(column_name, Integer,
                           ForeignKey(GenericModelReference_V0.id))
        fk_column.create(activity_table)

    # Commit this to the database
    db.commit()
1302
@RegisterMigration(29, MIGRATIONS)
def migrate_data_foreign_keys(db):
    """
    This will migrate the data from the old object and target attributes which
    use the old ActivityIntermediator to the new temporary fields which use the
    new GenericForeignKey.

    For each activity, a GenericModelReference row is created for its
    object (and target, when present) and the new row's id is written into
    temp_object / temp_target.
    """

    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")


    # Iterate through all activities doing the migration per activity.
    for activity in db.execute(activity_table.select()):
        # First do the "Activity.object" migration to "Activity.temp_object"
        # I need to get the object from the Activity, I can't use the old
        # Activity.get_object as we're in a migration.
        object_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.object
        )).first()

        object_ai_type = ActivityIntermediator_R0.TYPES[object_ai.type]
        object_ai_table = inspect_table(metadata, object_ai_type.__tablename__)

        activity_object = db.execute(object_ai_table.select(
            object_ai_table.c.activity==object_ai.id
        )).first()

        # now we need to create the GenericModelReference
        object_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_object.id,
            model_type=object_ai_type.__tablename__
        ))

        # Now set the ID of the GenericModelReference in the GenericForeignKey.
        # Fix: restrict the update to *this* activity — without the .where()
        # every row in core__activities was overwritten on each iteration.
        db.execute(activity_table.update().values(
            temp_object=object_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

        # Now do same process for "Activity.target" to "Activity.temp_target"
        # not all Activities have a target so if it doesn't just skip the rest
        # of this.
        if activity.target is None:
            continue

        # Now get the target for the activity.
        target_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.target
        )).first()

        target_ai_type = ActivityIntermediator_R0.TYPES[target_ai.type]
        target_ai_table = inspect_table(metadata, target_ai_type.__tablename__)

        activity_target = db.execute(target_ai_table.select(
            target_ai_table.c.activity==target_ai.id
        )).first()

        # We now want to create the new target GenericModelReference
        target_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_target.id,
            model_type=target_ai_type.__tablename__
        ))

        # Fix: write to temp_target (not temp_object, which clobbered the
        # object reference set above and left temp_target NULL), and again
        # restrict the update to this activity.
        db.execute(activity_table.update().values(
            temp_target=target_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

    # Commit to the database.
    db.commit()
1375
@RegisterMigration(30, MIGRATIONS)
def rename_and_remove_object_and_target(db):
    """
    Drops the old Activity.object and Activity.target columns, then moves
    the temporary GenericForeignKey columns into their final names.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Drop the old ActivityIntermediator-based columns first.
    for obsolete_name in ("object", "target"):
        activity_table.columns[obsolete_name].drop()

    # Rename the temporary columns to their permanent names.
    for temp_name, final_name in (("temp_object", "object_id"),
                                  ("temp_target", "target_id")):
        activity_table.columns[temp_name].alter(name=final_name)

    # Commit the changes to the database.
    db.commit()
1403
@RegisterMigration(31, MIGRATIONS)
def remove_activityintermediator(db):
    """
    This removes the old specific ActivityIntermediator model which has been
    superseded by the GenericForeignKey field.
    """
    metadata = MetaData(bind=db.bind)

    # Drop the "activity" column from every table that referenced the AI.
    for table_name in ("core__collections", "core__media_entries",
                       "core__media_comments", "core__users"):
        table = inspect_table(metadata, table_name)
        table.columns["activity"].drop()

    # Drop the AI table itself.
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    ai_table.drop()

    # Commit the changes
    db.commit()
1435
1436 ##
1437 # Migrations for converting the User model into a Local and Remote User
1438 # setup.
1439 ##
1440
class LocalUser_V0(declarative_base()):
    """Snapshot of the LocalUser model as created by migration 32."""
    __tablename__ = "core__local_users"

    # Shares its primary key with the base core__users row.
    id = Column(Integer, ForeignKey(User.id), primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)

    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
1454
class RemoteUser_V0(declarative_base()):
    """Snapshot of the RemoteUser model as created by migration 32."""
    __tablename__ = "core__remote_users"

    # Shares its primary key with the base core__users row.
    id = Column(Integer, ForeignKey(User.id), primary_key=True)
    webfinger = Column(Unicode, unique=True)
1460
@RegisterMigration(32, MIGRATIONS)
def federation_user_create_tables(db):
    """
    Create the LocalUser and RemoteUser tables and extend core__users
    with the columns shared by both kinds of user.
    """
    # Create the new subclass tables.
    LocalUser_V0.__table__.create(db.bind)
    RemoteUser_V0.__table__.create(db.bind)
    db.commit()

    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    # Columns shared by every kind of user, added in this order.
    shared_columns = [
        Column("updated", DateTime, default=datetime.datetime.utcnow),
        Column("type", Unicode),
        Column("name", Unicode),
    ]
    for column in shared_columns:
        column.create(user_table)

    db.commit()
1495
@RegisterMigration(33, MIGRATIONS)
def federation_user_migrate_data(db):
    """
    Copy every existing user's local-only data into the LocalUser table
    and mark the base row as a local user.
    """
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, "core__users")
    local_user_table = inspect_table(metadata, "core__local_users")

    local_user_type = LocalUser.__mapper_args__["polymorphic_identity"]

    for user in db.execute(user_table.select()):
        # The fields that only make sense for local users move over.
        db.execute(local_user_table.insert().values(
            id=user.id,
            username=user.username,
            email=user.email,
            pw_hash=user.pw_hash,
            wants_comment_notification=user.wants_comment_notification,
            wants_notifications=user.wants_notifications,
            license_preference=user.license_preference,
            uploaded=user.uploaded,
            upload_limit=user.upload_limit
        ))

        # Tag the base row as local and backfill "updated" from "created".
        db.execute(user_table.update().where(user_table.c.id==user.id).values(
            updated=user.created,
            type=local_user_type
        ))

    db.commit()
1525
class User_vR2(declarative_base()):
    """Shared-fields-only User table for the SQLite rebuild in migration 34.

    Used via replace_table_hack because SQLite cannot drop boolean columns.
    """
    __tablename__ = "rename__users"

    id = Column(Integer, primary_key=True)
    url = Column(Unicode)
    bio = Column(UnicodeText)
    name = Column(Unicode)
    type = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    location = Column(Integer, ForeignKey(Location.id))
1537
@RegisterMigration(34, MIGRATIONS)
def federation_remove_fields(db):
    """
    This removes the fields from User model which aren't shared
    """
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, "core__users")

    # Drop the columns that moved to LocalUser, in the original order.
    for column_name in ("username", "email", "pw_hash",
                        "license_preference", "uploaded", "upload_limit"):
        user_table.columns[column_name].drop()

    if db.bind.url.drivername == 'sqlite':
        # SQLite can't drop boolean columns, so rebuild the table through
        # the rename/copy hack instead.
        User_vR2.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, "rename__users")
        replace_table_hack(db, user_table, new_user_table)
    else:
        # Other backends can drop the boolean columns directly.
        for column_name in ("wants_comment_notification",
                            "wants_notifications"):
            user_table.columns[column_name].drop()

        db.commit()
1581
@RegisterMigration(35, MIGRATIONS)
def federation_media_entry(db):
    """
    Add the federation fields to MediaEntry.

    Adds "public_id", "remote" and "updated" columns, then backfills
    "updated" from each entry's "created" timestamp and marks every
    existing entry as local (remote=False).
    """
    metadata = MetaData(bind=db.bind)
    media_entry_table = inspect_table(metadata, "core__media_entries")

    # Add new fields
    public_id_column = Column(
        "public_id",
        Unicode,
        unique=True,
        nullable=True
    )
    public_id_column.create(
        media_entry_table,
        unique_name="media_public_id"
    )

    remote_column = Column(
        "remote",
        Boolean,
        default=False
    )
    remote_column.create(media_entry_table)

    updated_column = Column(
        "updated",
        DateTime,
        default=datetime.datetime.utcnow,
    )
    updated_column.create(media_entry_table)

    # Data migration.  Fix: constrain each update to the current row —
    # without the .where() every iteration rewrote ALL entries, leaving
    # every row with the *last* entry's "created" value as "updated".
    for entry in db.execute(media_entry_table.select()):
        db.execute(media_entry_table.update().values(
            updated=entry.created,
            remote=False
        ).where(media_entry_table.c.id==entry.id))

    db.commit()
1621
@RegisterMigration(36, MIGRATIONS)
def create_oauth1_dummies(db):
    """
    Creates a dummy client, request and access tokens.

    This is used when invalid data is submitted but real clients and
    access tokens. The use of dummy objects prevents timing attacks.
    """
    metadata = MetaData(bind=db.bind)
    client_table = inspect_table(metadata, "core__clients")
    request_token_table = inspect_table(metadata, "core__request_tokens")
    access_token_table = inspect_table(metadata, "core__access_tokens")

    def _fresh_secret():
        # Whilst we don't rely on the secret key being unique or unknown to
        # prevent unauthorized clients from using it to authenticate, we
        # still, as an extra layer of protection, create a cryptographically
        # secure key individual to each instance that should never be able
        # to be known.
        return crypto.random_string(50)

    # Dummy created/updated timestamps shared by all three rows.
    epoch = datetime.datetime.fromtimestamp(0)
    dummy_timestamps = {
        "created": epoch,
        "updated": epoch,
    }

    # Create the dummy Client
    db.execute(client_table.insert().values(
        id=oauth.DUMMY_CLIENT_ID,
        secret=_fresh_secret(),
        application_type="dummy",
        **dummy_timestamps
    ))

    # Create the dummy RequestToken
    db.execute(request_token_table.insert().values(
        token=oauth.DUMMY_REQUEST_TOKEN,
        secret=_fresh_secret(),
        client=oauth.DUMMY_CLIENT_ID,
        verifier=_fresh_secret(),
        callback="oob",
        **dummy_timestamps
    ))

    # Create the dummy AccessToken
    db.execute(access_token_table.insert().values(
        token=oauth.DUMMY_ACCESS_TOKEN,
        secret=_fresh_secret(),
        request_token=oauth.DUMMY_REQUEST_TOKEN,
        **dummy_timestamps
    ))

    # Commit the changes
    db.commit()