1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
27 from sqlalchemy
import (MetaData
, Table
, Column
, Boolean
, SmallInteger
,
28 Integer
, Unicode
, UnicodeText
, DateTime
,
29 ForeignKey
, Date
, Index
)
30 from sqlalchemy
.exc
import ProgrammingError
31 from sqlalchemy
.ext
.declarative
import declarative_base
32 from sqlalchemy
.sql
import and_
33 from sqlalchemy
.schema
import UniqueConstraint
35 from mediagoblin
import oauth
36 from mediagoblin
.tools
import crypto
37 from mediagoblin
.db
.extratypes
import JSONEncoded
, MutationDict
38 from mediagoblin
.db
.migration_tools
import (
39 RegisterMigration
, inspect_table
, replace_table_hack
)
40 from mediagoblin
.db
.models
import (MediaEntry
, Collection
, MediaComment
, User
,
41 Privilege
, Generator
, LocalUser
, Location
)
42 from mediagoblin
.db
.extratypes
import JSONEncoded
, MutationDict
@RegisterMigration(1, MIGRATIONS)
def ogg_to_webm_audio(db_conn):
    """Rename the 'ogg' media-file keyname to 'webm_audio'."""
    metadata = MetaData(bind=db_conn.bind)

    file_keynames = Table('core__file_keynames', metadata, autoload=True,
                          autoload_with=db_conn.bind)

    db_conn.execute(
        file_keynames.update().where(file_keynames.c.name == 'ogg').
            values(name='webm_audio'))

    db_conn.commit()
@RegisterMigration(2, MIGRATIONS)
def add_wants_notification_column(db_conn):
    """Add User.wants_comment_notification (Boolean, default True)."""
    metadata = MetaData(bind=db_conn.bind)

    users = Table('core__users', metadata, autoload=True,
                  autoload_with=db_conn.bind)

    col = Column('wants_comment_notification', Boolean,
                 default=True, nullable=True)
    # populate_defaults backfills True for all existing rows
    col.create(users, populate_defaults=True)
    db_conn.commit()
@RegisterMigration(3, MIGRATIONS)
def add_transcoding_progress(db_conn):
    """Add MediaEntry.transcoding_progress (SmallInteger)."""
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('transcoding_progress', SmallInteger)
    col.create(media_entry)
    db_conn.commit()
class Collection_v0(declarative_base()):
    """Frozen snapshot of the Collection model as of migration 4."""
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    items = Column(Integer, default=0)
class CollectionItem_v0(declarative_base()):
    """Frozen snapshot of the CollectionItem model as of migration 4."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    ## This should be activated, normally.
    ## But this would change the way the next migration used to work.
    ## So it's commented for now.
    ## __table_args__ = (
    ##    UniqueConstraint('collection', 'media_entry'),
    ##    {})
# Set to True by migration 4 so migration 7 knows the unique constraint
# was already created along with the tables.
collectionitem_unique_constraint_done = False
@RegisterMigration(4, MIGRATIONS)
def add_collection_tables(db_conn):
    """Create the collection and collection-item tables."""
    Collection_v0.__table__.create(db_conn.bind)
    CollectionItem_v0.__table__.create(db_conn.bind)

    # Record that the tables (and their constraint) were freshly created,
    # so migration 7 can skip re-adding the unique constraint.
    global collectionitem_unique_constraint_done
    collectionitem_unique_constraint_done = True

    db_conn.commit()
@RegisterMigration(5, MIGRATIONS)
def add_mediaentry_collected(db_conn):
    """Add MediaEntry.collected counter (Integer, default 0)."""
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('collected', Integer, default=0)
    col.create(media_entry)
    db_conn.commit()
class ProcessingMetaData_v0(declarative_base()):
    """Frozen snapshot of the ProcessingMetaData model as of migration 6."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    callback_url = Column(Unicode)
@RegisterMigration(6, MIGRATIONS)
def create_processing_metadata_table(db):
    """Create the core__processing_metadata table."""
    ProcessingMetaData_v0.__table__.create(db.bind)
    db.commit()
# Okay, problem being:
# Migration #4 forgot to add the uniqueconstraint for the
# new tables. While creating the tables from scratch had
# the constraint enabled.
#
# So we have four situations that should end up at the same
# sql database layout:
#
# 1. Fresh install.
#    Well, easy. Just uses the tables in models.py
# 2. Fresh install using a git version just before this migration
#    The tables are all there, the unique constraint is also there.
#    This migration should do nothing.
#    But as we can't detect the uniqueconstraint easily,
#    this migration just adds the constraint again.
#    And possibly fails very loud. But ignores the failure.
# 3. old install, not using git, just releases.
#    This one will get the new tables in #4 (now with constraint!)
#    And this migration is just skipped silently.
# 4. old install, always on latest git.
#    This one has the tables, but lacks the constraint.
#    So this migration adds the constraint.
@RegisterMigration(7, MIGRATIONS)
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset it. Maybe the whole thing gets run again
        # For a different db?
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)

    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint(
        'collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    try:
        constraint.create()
    except ProgrammingError:
        # User probably has an install that was run since the
        # collection tables were added, so we don't need to run this migration.
        pass

    db_conn.commit()
@RegisterMigration(8, MIGRATIONS)
def add_license_preference(db):
    """Add User.license_preference (Unicode)."""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')

    col = Column('license_preference', Unicode)
    col.create(user_table)
    db.commit()
@RegisterMigration(9, MIGRATIONS)
def mediaentry_new_slug_era(db):
    """
    Update for the new era for media type slugs.

    Entries without slugs now display differently in the url like:
      /u/cwebber/m/id=251/

    ... because of this, we should back-convert:
     - entries without slugs should be converted to use the id, if possible, to
       make old urls still work
     - slugs with = (or also : which is now also not allowed) to have those
       stripped out (small possibility of breakage here sadly)
    """

    def slug_and_user_combo_exists(slug, uploader):
        # True if some other entry by this uploader already owns the slug
        return db.execute(
            media_table.select(
                and_(media_table.c.uploader==uploader,
                     media_table.c.slug==slug))).first() is not None

    def append_garbage_till_unique(row, new_slug):
        """
        Attach junk to this row until it's unique, then save it
        """
        if slug_and_user_combo_exists(new_slug, row.uploader):
            # okay, still no success;
            # let's whack junk on there till it's unique.
            new_slug += u'-' + uuid.uuid4().hex[:4]
            # keep going if necessary!
            while slug_and_user_combo_exists(new_slug, row.uploader):
                new_slug += uuid.uuid4().hex[:4]

        db.execute(
            media_table.update(). \
                where(media_table.c.id==row.id). \
                values(slug=new_slug))

    metadata = MetaData(bind=db.bind)

    media_table = inspect_table(metadata, 'core__media_entries')

    for row in db.execute(media_table.select()):
        # no slug, try setting to an id
        if not row.slug:
            append_garbage_till_unique(row, six.text_type(row.id))
        # has "=" or ":" in it... we're getting rid of those
        elif u"=" in row.slug or u":" in row.slug:
            append_garbage_till_unique(
                row, row.slug.replace(u"=", u"-").replace(u":", u"-"))

    db.commit()
@RegisterMigration(10, MIGRATIONS)
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    existing_slugs = {}   # creator id -> list of slugs already seen
    slugs_to_change = []  # ids of rows whose slug collides

    for row in db.execute(collection_table.select()):
        # if duplicate slug, generate a unique slug
        if row.creator in existing_slugs and row.slug in \
           existing_slugs[row.creator]:
            slugs_to_change.append(row.id)
        else:
            if not row.creator in existing_slugs:
                existing_slugs[row.creator] = [row.slug]
            else:
                existing_slugs[row.creator].append(row.slug)

    for row_id in slugs_to_change:
        new_slug = six.text_type(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))

    # sqlite does not like to change the schema when a transaction(update) is
    # not yet completed
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
        name='core__collection_creator_slug_key',
        table=collection_table)
    constraint.create()

    db.commit()
@RegisterMigration(11, MIGRATIONS)
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    verification_key = user_table.columns['verification_key']
    fp_verification_key = user_table.columns['fp_verification_key']
    fp_token_expire = user_table.columns['fp_token_expire']

    verification_key.drop()
    fp_verification_key.drop()
    fp_token_expire.drop()

    db.commit()
class CommentSubscription_v0(declarative_base()):
    """Frozen snapshot of the CommentSubscription model as of migration 12."""
    __tablename__ = 'core__comment_subscriptions'
    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)

    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)
class Notification_v0(declarative_base()):
    """Frozen snapshot of the Notification base model as of migration 12."""
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
class CommentNotification_v0(Notification_v0):
    """Notification subtype whose subject is a MediaComment."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
class ProcessingNotification_v0(Notification_v0):
    """Notification subtype whose subject is a MediaEntry."""
    __tablename__ = 'core__processing_notifications'

    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
@RegisterMigration(12, MIGRATIONS)
def add_new_notification_tables(db):
    """Create the comment-subscription and notification tables."""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    mediaentry_table = inspect_table(metadata, 'core__media_entries')
    mediacomment_table = inspect_table(metadata, 'core__media_comments')

    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)

    db.commit()
@RegisterMigration(13, MIGRATIONS)
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        constraint = UniqueConstraint('username', table=user_table)
        constraint.create()

    db.commit()
class Client_v0(declarative_base()):
    """
    Model representing a client - Used for API Auth
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    expirey = Column(DateTime, nullable=True)  # sic: upstream spelling kept
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # optional stuff
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    def __repr__(self):
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
class RequestToken_v0(declarative_base()):
    """
    Model for representing the request tokens
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client_v0.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class AccessToken_v0(declarative_base()):
    """
    Model for representing the access tokens
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class NonceTimestamp_v0(declarative_base()):
    """
    A place the timestamp and nonce can be stored - this is for OAuth1
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
@RegisterMigration(14, MIGRATIONS)
def create_oauth1_tables(db):
    """ Creates the OAuth1 tables """

    Client_v0.__table__.create(db.bind)
    RequestToken_v0.__table__.create(db.bind)
    AccessToken_v0.__table__.create(db.bind)
    NonceTimestamp_v0.__table__.create(db.bind)

    db.commit()
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    col = Column('wants_notifications', Boolean, default=True)
    col.create(user_table)
    db.commit()
@RegisterMigration(16, MIGRATIONS)
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    # running total of bytes this user has uploaded
    col = Column('uploaded', Integer, default=0)
    col.create(user_table)

    # per-user cap; NULL means "use the site default"
    col = Column('upload_limit', Integer)
    col.create(user_table)

    col = Column('file_size', Integer, default=0)
    col.create(media_entry_table)

    db.commit()
@RegisterMigration(17, MIGRATIONS)
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_file_table = inspect_table(metadata, "core__mediafiles")

    col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    col.create(media_file_table)

    db.commit()
class ReportBase_v0(declarative_base()):
    """Polymorphic base for the moderation report models (migration 18)."""
    __tablename__ = 'core__reports'
    id = Column(Integer, primary_key=True)
    reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
    report_content = Column(UnicodeText)
    reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # 'type' column discriminates between comment/media report subclasses
    discriminator = Column('type', Unicode(50))
    resolver_id = Column(Integer, ForeignKey(User.id))
    resolved = Column(DateTime)
    result = Column(UnicodeText)
    __mapper_args__ = {'polymorphic_on': discriminator}
class CommentReport_v0(ReportBase_v0):
    """Report filed against a MediaComment."""
    __tablename__ = 'core__reports_on_comments'
    __mapper_args__ = {'polymorphic_identity': 'comment_report'}

    id = Column('id', Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
class MediaReport_v0(ReportBase_v0):
    """Report filed against a MediaEntry."""
    __tablename__ = 'core__reports_on_media'
    __mapper_args__ = {'polymorphic_identity': 'media_report'}

    id = Column('id', Integer, ForeignKey('core__reports.id'), primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
class UserBan_v0(declarative_base()):
    """Frozen snapshot of the UserBan model as of migration 18."""
    __tablename__ = 'core__user_bans'
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     primary_key=True)
    # NULL expiration_date means the ban is indefinite
    expiration_date = Column(Date)
    reason = Column(UnicodeText, nullable=False)
class Privilege_v0(declarative_base()):
    """Frozen snapshot of the Privilege model as of migration 18."""
    __tablename__ = 'core__privileges'
    id = Column(Integer, nullable=False, primary_key=True, unique=True)
    privilege_name = Column(Unicode, nullable=False, unique=True)
class PrivilegeUserAssociation_v0(declarative_base()):
    """
    Frozen snapshot of the privilege/user association table as of
    migration 18.

    NOTE(review): the foreign keys here are intentionally crossed
    (privilege_id -> User.id, user_id -> Privilege.id) — this is the
    historical bug that migration 21 below compensates for by swapping
    the columns.  Do not "fix" it here.
    """
    __tablename__ = 'core__privileges_users'
    privilege_id = Column(
        'core__privilege_id',
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    user_id = Column(
        'core__user_id',
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
# Default privilege rows inserted by migration 18 (mirrors the FOUNDATIONS
# system used for fresh installs).
PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
                            {'privilege_name':u'moderator'},
                            {'privilege_name':u'uploader'},
                            {'privilege_name':u'reporter'},
                            {'privilege_name':u'commenter'},
                            {'privilege_name':u'active'}]
590 # vR1 stands for "version Rename 1". This only exists because we need
591 # to deal with dropping some booleans and it's otherwise impossible
class User_vR1(declarative_base()):
    """Rename-helper copy of the User table (without the dropped booleans)."""
    __tablename__ = 'rename__users'
    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    url = Column(Unicode)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
@RegisterMigration(18, MIGRATIONS)
def create_moderation_tables(db):
    """Create the moderation/privilege tables and port is_admin/status over."""

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table = inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status==u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin==False).where(
                user_table.c.status!=u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name==privilege_name)).first()['id']
        for privilege_name in
            [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
    ]

    # Give each user the appopriate privileges depending whether they are an
    # admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # NOTE(review): the keyword/value pairing below is deliberately crossed
    # (core__privilege_id gets the user id and vice versa) to match the
    # mis-named columns in PrivilegeUserAssociation_v0; migration 21 swaps
    # them back.  Preserved as-is.
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                             reporter_privilege_id, commenter_privilege_id,
                             active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~
        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)

    media_collected = inspect_table(metadata, 'core__media_entries')
    media_collected = media_collected.columns['collected']

    media_collected.drop()

    db.commit()
@RegisterMigration(20, MIGRATIONS)
def add_metadata_column(db):
    """Add MediaEntry.media_metadata (mutable JSON dict)."""
    metadata = MetaData(bind=db.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
                 default=MutationDict())
    col.create(media_entry)

    db.commit()
class PrivilegeUserAssociation_R1(declarative_base()):
    """
    Rename-helper copy of the privilege/user association table with the
    columns correctly named and referenced (used by migration 21).
    """
    __tablename__ = 'rename__privileges_users'
    user = Column(
        "user",
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    privilege = Column(
        "privilege",
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
@RegisterMigration(21, MIGRATIONS)
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
@RegisterMigration(22, MIGRATIONS)
def add_index_username_field(db):
    """
    This migration has been found to be doing the wrong thing.  See
    the documentation in migration 23 (revert_username_index) below
    which undoes this for those databases that did run this migration.

    Old description:
      This indexes the User.username field which is frequently queried
      for example a user logging in. This solves the issue #894
    """
    ## This code is left commented out *on purpose!*
    ##
    ## We do not normally allow commented out code like this in
    ## MediaGoblin but this is a special case: since this migration has
    ## been nullified but with great work to set things back below,
    ## this is commented out for historical clarity.
    #
    # metadata = MetaData(bind=db.bind)
    # user_table = inspect_table(metadata, "core__users")
    #
    # new_index = Index("ix_core__users_uploader", user_table.c.username)
    # new_index.create()
    #
    # db.commit()
    pass
@RegisterMigration(23, MIGRATIONS)
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (ie, either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

    db.commit()
class Generator_R0(declarative_base()):
    """Frozen snapshot of the Generator model as of migration 24."""
    __tablename__ = "core__generators"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    object_type = Column(Unicode, nullable=False)
class ActivityIntermediator_R0(declarative_base()):
    """Join object linking an Activity to its object/target (migration 24)."""
    __tablename__ = "core__activity_intermediators"
    id = Column(Integer, primary_key=True)
    type = Column(Unicode, nullable=False)

    # These are needed for migration 29
    TYPES = {
        "user": User,
        "media": MediaEntry,
        "comment": MediaComment,
        "collection": Collection,
    }
class Activity_R0(declarative_base()):
    """Frozen snapshot of the Activity model as of migration 24."""
    __tablename__ = "core__activities"
    id = Column(Integer, primary_key=True)
    actor = Column(Integer, ForeignKey(User.id), nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    verb = Column(Unicode, nullable=False)
    content = Column(Unicode, nullable=True)
    title = Column(Unicode, nullable=True)
    generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
    object = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=False)
    target = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=True)
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
    """
    Creates everything to create activities in GMG

    - Adds Activity, ActivityIntermediator and Generator table
    - Creates GMG service generator for activities produced by the server
    - Adds the activity_as_object and activity_as_target to objects/targets
    - Retroactively adds activities for what we can accurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Initiate the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")

    # Create the foundations for Generator: a single "service" generator row
    # representing this GMG instance itself.
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()

    # Now we want to modify the tables which MAY have an activity at some point
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()

    # Now we want to retroactively add what activities we can
    # first we'll add activities when people uploaded media.
    # these can't have content as it's not feasible to get the
    # correct content strings.
    for media in db.execute(media_entry_table.select()):
        # Now we want to create the intermediatory
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        # Re-select the row to get a full result (not just the PK)
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add a "create" activity for the upload; published/updated are
        # back-dated to the media's creation time.
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
        )).first()

        # Create an AI for target
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        # NOTE(review): the filter below matches on media_entry.id rather
        # than comment.id — this looks like a latent bug in the original
        # migration; preserved as-is, confirm before changing.
        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
        ).where(media_comments_table.c.id==media_entry.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment"
        )).inserted_primary_key[0]

        # Back-dated "comment" activity targeting the media's AI
        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # Create the intermediatory for the collection
        db_ai = db.execute(ai_table.insert().values(
            type="collection"
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now add link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }

        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()
class Location_V0(declarative_base()):
    """Snapshot of the Location model used by migration 25."""
    __tablename__ = "core__locations"
    id = Column(Integer, primary_key=True)
    # Human-readable place name
    name = Column(Unicode)
    # position and address are stored as JSON blobs (mutation-tracked dicts)
    position = Column(MutationDict.as_mutable(JSONEncoded))
    address = Column(MutationDict.as_mutable(JSONEncoded))
@RegisterMigration(25, MIGRATIONS)
def add_location_model(db):
    """ Add location model """
    metadata = MetaData(bind=db.bind)

    # Create location table
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Inspect the tables we need
    user = inspect_table(metadata, "core__users")
    collections = inspect_table(metadata, "core__collections")
    media_entry = inspect_table(metadata, "core__media_entries")
    media_comments = inspect_table(metadata, "core__media_comments")

    # Now add location support to the various models.
    # A fresh Column object is built per table: sqlalchemy-migrate columns
    # can only be attached to one table each.
    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(user)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(collections)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_entry)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_comments)

    db.commit()
@RegisterMigration(26, MIGRATIONS)
def datetime_to_utc(db):
    """ Convert datetime stamps to UTC """
    # Get the server's timezone, this is what the database has stored
    server_timezone = dateutil.tz.tzlocal()

    ##
    # Look up all the timestamps and convert them to UTC
    ##
    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Stored values are naive local times: attach the server timezone...
        dt = dt.replace(tzinfo=server_timezone)

        # ...then convert to UTC
        return dt.astimezone(pytz.UTC)

    # Convert the User model
    user_table = inspect_table(metadata, "core__users")
    for user in db.execute(user_table.select()):
        db.execute(user_table.update().values(
            created=dt_to_utc(user.created)
        ).where(user_table.c.id==user.id))

    # Convert Client
    client_table = inspect_table(metadata, "core__clients")
    for client in db.execute(client_table.select()):
        db.execute(client_table.update().values(
            created=dt_to_utc(client.created),
            updated=dt_to_utc(client.updated)
        ).where(client_table.c.id==client.id))

    # Convert RequestToken
    rt_table = inspect_table(metadata, "core__request_tokens")
    for request_token in db.execute(rt_table.select()):
        db.execute(rt_table.update().values(
            created=dt_to_utc(request_token.created),
            updated=dt_to_utc(request_token.updated)
        ).where(rt_table.c.token==request_token.token))

    # Convert AccessToken
    at_table = inspect_table(metadata, "core__access_tokens")
    for access_token in db.execute(at_table.select()):
        db.execute(at_table.update().values(
            created=dt_to_utc(access_token.created),
            updated=dt_to_utc(access_token.updated)
        ).where(at_table.c.token==access_token.token))

    # Convert MediaEntry
    media_table = inspect_table(metadata, "core__media_entries")
    for media in db.execute(media_table.select()):
        db.execute(media_table.update().values(
            created=dt_to_utc(media.created)
        ).where(media_table.c.id==media.id))

    # Convert Media Attachment File
    media_attachment_table = inspect_table(metadata, "core__attachment_files")
    for ma in db.execute(media_attachment_table.select()):
        db.execute(media_attachment_table.update().values(
            created=dt_to_utc(ma.created)
        ).where(media_attachment_table.c.id==ma.id))

    # Convert MediaComment
    comment_table = inspect_table(metadata, "core__media_comments")
    for comment in db.execute(comment_table.select()):
        db.execute(comment_table.update().values(
            created=dt_to_utc(comment.created)
        ).where(comment_table.c.id==comment.id))

    # Convert Collection
    collection_table = inspect_table(metadata, "core__collections")
    for collection in db.execute(collection_table.select()):
        db.execute(collection_table.update().values(
            created=dt_to_utc(collection.created)
        ).where(collection_table.c.id==collection.id))

    # Convert Collection Item
    collection_item_table = inspect_table(metadata, "core__collection_items")
    for ci in db.execute(collection_item_table.select()):
        db.execute(collection_item_table.update().values(
            added=dt_to_utc(ci.added)
        ).where(collection_item_table.c.id==ci.id))

    # Convert Comment subscription
    comment_sub = inspect_table(metadata, "core__comment_subscriptions")
    for sub in db.execute(comment_sub.select()):
        db.execute(comment_sub.update().values(
            created=dt_to_utc(sub.created)
        ).where(comment_sub.c.id==sub.id))

    # Convert Notification
    notification_table = inspect_table(metadata, "core__notifications")
    for notification in db.execute(notification_table.select()):
        db.execute(notification_table.update().values(
            created=dt_to_utc(notification.created)
        ).where(notification_table.c.id==notification.id))

    # Convert ReportBase
    reportbase_table = inspect_table(metadata, "core__reports")
    for report in db.execute(reportbase_table.select()):
        db.execute(reportbase_table.update().values(
            created=dt_to_utc(report.created)
        ).where(reportbase_table.c.id==report.id))

    # Convert Generator
    generator_table = inspect_table(metadata, "core__generators")
    for generator in db.execute(generator_table.select()):
        db.execute(generator_table.update().values(
            published=dt_to_utc(generator.published),
            updated=dt_to_utc(generator.updated)
        ).where(generator_table.c.id==generator.id))

    # Convert Activity
    activity_table = inspect_table(metadata, "core__activities")
    for activity in db.execute(activity_table.select()):
        db.execute(activity_table.update().values(
            published=dt_to_utc(activity.published),
            updated=dt_to_utc(activity.updated)
        ).where(activity_table.c.id==activity.id))

    # Commit this to the database
    db.commit()
1265 # Migrations to handle migrating from activity specific foreign key to the
1266 # new GenericForeignKey implementations. They have been split up to improve
1267 # readability and minimise errors
class GenericModelReference_V0(declarative_base()):
    """Snapshot of the GenericModelReference model for migrations 27-31."""
    __tablename__ = "core__generic_model_reference"

    id = Column(Integer, primary_key=True)
    # Primary key of the referenced row
    obj_pk = Column(Integer, nullable=False)
    # Table name identifying which model obj_pk points into
    model_type = Column(Unicode, nullable=False)
@RegisterMigration(27, MIGRATIONS)
def create_generic_model_reference(db):
    """ Creates the Generic Model Reference table """
    GenericModelReference_V0.__table__.create(db.bind)
    db.commit()
@RegisterMigration(28, MIGRATIONS)
def add_foreign_key_fields(db):
    """
    Add the fields for GenericForeignKey to the model under temporary name,
    this is so that later a data migration can occur. They will be renamed to
    the original names.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Create column and add to model.
    object_column = Column("temp_object", Integer, ForeignKey(GenericModelReference_V0.id))
    object_column.create(activity_table)

    target_column = Column("temp_target", Integer, ForeignKey(GenericModelReference_V0.id))
    target_column.create(activity_table)

    # Commit this to the database
    db.commit()
@RegisterMigration(29, MIGRATIONS)
def migrate_data_foreign_keys(db):
    """
    This will migrate the data from the old object and target attributes which
    use the old ActivityIntermediator to the new temporary fields which use the
    new GenericForeignKey.

    For each Activity row:
      1. resolve its ActivityIntermediator ("AI") rows for object/target,
      2. create a GenericModelReference pointing at the real model row,
      3. store the new GMR id in temp_object / temp_target.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")

    # Iterate through all activities doing the migration per activity.
    for activity in db.execute(activity_table.select()):
        # First do the "Activity.object" migration to "Activity.temp_object"
        # I need to get the object from the Activity, I can't use the old
        # Activity.get_object as we're in a migration.
        object_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.object
        )).first()

        # Map the AI's type string to the model class, then to its table.
        object_ai_type = ActivityIntermediator_R0.TYPES[object_ai.type]
        object_ai_table = inspect_table(metadata, object_ai_type.__tablename__)

        activity_object = db.execute(object_ai_table.select(
            object_ai_table.c.activity==object_ai.id
        )).first()

        # now we need to create the GenericModelReference
        object_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_object.id,
            model_type=object_ai_type.__tablename__
        ))

        # Now set the ID of the GenericModelReference in the GenericForeignKey.
        # BUGFIX: the original update had no .where() clause, so every
        # iteration overwrote temp_object on ALL activities; restrict it to
        # the current activity row.
        db.execute(activity_table.update().values(
            temp_object=object_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

        # Now do same process for "Activity.target" to "Activity.temp_target"
        # not all Activities have a target so if it doesn't just skip the rest
        # of this iteration.
        if activity.target is None:
            continue

        # Now get the target for the activity.
        target_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.target
        )).first()

        target_ai_type = ActivityIntermediator_R0.TYPES[target_ai.type]
        target_ai_table = inspect_table(metadata, target_ai_type.__tablename__)

        activity_target = db.execute(target_ai_table.select(
            target_ai_table.c.activity==target_ai.id
        )).first()

        # We now want to create the new target GenericModelReference
        target_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_target.id,
            model_type=target_ai_type.__tablename__
        ))

        # BUGFIX: the original wrote the target GMR into temp_object,
        # clobbering the object reference and leaving temp_target NULL;
        # write it to temp_target, scoped to this activity only.
        db.execute(activity_table.update().values(
            temp_target=target_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

    # Commit to the database.
    db.commit()
@RegisterMigration(30, MIGRATIONS)
def rename_and_remove_object_and_target(db):
    """
    Renames the new Activity.object and Activity.target fields and removes the
    old ones.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Firstly lets remove the old fields.
    old_object_column = activity_table.columns["object"]
    old_target_column = activity_table.columns["target"]

    # Drop the old AI-based columns.
    old_object_column.drop()
    old_target_column.drop()

    # Now get the new columns.
    new_object_column = activity_table.columns["temp_object"]
    new_target_column = activity_table.columns["temp_target"]

    # rename them to the old names.
    new_object_column.alter(name="object_id")
    new_target_column.alter(name="target_id")

    # Commit the changes to the database.
    db.commit()
@RegisterMigration(31, MIGRATIONS)
def remove_activityintermediator(db):
    """
    This removes the old specific ActivityIntermediator model which has been
    superseded by the GenericForeignKey field.
    """
    metadata = MetaData(bind=db.bind)

    # Remove the columns which reference the AI
    collection_table = inspect_table(metadata, "core__collections")
    collection_ai_column = collection_table.columns["activity"]
    collection_ai_column.drop()

    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_entry_ai_column = media_entry_table.columns["activity"]
    media_entry_ai_column.drop()

    comments_table = inspect_table(metadata, "core__media_comments")
    comments_ai_column = comments_table.columns["activity"]
    comments_ai_column.drop()

    user_table = inspect_table(metadata, "core__users")
    user_ai_column = user_table.columns["activity"]
    user_ai_column.drop()

    # Drop the AI table itself now that nothing references it.
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    ai_table.drop()

    # Commit the changes
    db.commit()
1437 # Migrations for converting the User model into a Local and Remote User
class LocalUser_V0(declarative_base()):
    """Snapshot of the LocalUser model for the federation migrations."""
    __tablename__ = "core__local_users"

    # Shares its primary key with the base User row
    id = Column(Integer, ForeignKey(User.id), primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)

    # Notification / preference fields moved off the shared User model
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
class RemoteUser_V0(declarative_base()):
    """Snapshot of the RemoteUser model for the federation migrations."""
    __tablename__ = "core__remote_users"

    # Shares its primary key with the base User row
    id = Column(Integer, ForeignKey(User.id), primary_key=True)
    # Remote users are identified by their webfinger address
    webfinger = Column(Unicode, unique=True)
@RegisterMigration(32, MIGRATIONS)
def federation_user_create_tables(db):
    """
    Create all the tables
    """
    # Create tables needed
    LocalUser_V0.__table__.create(db.bind)
    RemoteUser_V0.__table__.create(db.bind)
    db.commit()

    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    # Create the new fields on the shared User table
    updated_column = Column(
        "updated",
        DateTime,
        default=datetime.datetime.utcnow
    )
    updated_column.create(user_table)

    # Polymorphic discriminator (local vs remote)
    type_column = Column(
        "type",
        Unicode
    )
    type_column.create(user_table)

    name_column = Column(
        "name",
        Unicode
    )
    name_column.create(user_table)

    db.commit()
@RegisterMigration(33, MIGRATIONS)
def federation_user_migrate_data(db):
    """
    Migrate the data over to the new user models
    """
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, "core__users")
    local_user_table = inspect_table(metadata, "core__local_users")

    for user in db.execute(user_table.select()):
        # Copy the local-only fields into the new LocalUser row,
        # reusing the User's primary key.
        db.execute(local_user_table.insert().values(
            id=user.id,
            username=user.username,
            email=user.email,
            pw_hash=user.pw_hash,
            wants_comment_notification=user.wants_comment_notification,
            wants_notifications=user.wants_notifications,
            license_preference=user.license_preference,
            uploaded=user.uploaded,
            upload_limit=user.upload_limit
        ))

        # Mark every existing user as a LocalUser; seed `updated`
        # from the creation timestamp.
        db.execute(user_table.update().where(user_table.c.id==user.id).values(
            updated=user.created,
            type=LocalUser.__mapper_args__["polymorphic_identity"]
        ))

    db.commit()
class User_vR2(declarative_base()):
    """Replacement User table used by the SQLite drop-column workaround."""
    __tablename__ = "rename__users"

    id = Column(Integer, primary_key=True)
    url = Column(Unicode)
    bio = Column(UnicodeText)
    name = Column(Unicode)
    # Polymorphic discriminator (local vs remote)
    type = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    location = Column(Integer, ForeignKey(Location.id))
@RegisterMigration(34, MIGRATIONS)
def federation_remove_fields(db):
    """
    This removes the fields from User model which aren't shared
    """
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, "core__users")

    # Remove the columns moved to LocalUser from User
    username_column = user_table.columns["username"]
    username_column.drop()

    email_column = user_table.columns["email"]
    email_column.drop()

    pw_hash_column = user_table.columns["pw_hash"]
    pw_hash_column.drop()

    license_preference_column = user_table.columns["license_preference"]
    license_preference_column.drop()

    uploaded_column = user_table.columns["uploaded"]
    uploaded_column.drop()

    upload_limit_column = user_table.columns["upload_limit"]
    upload_limit_column.drop()

    # SQLLite can't drop booleans -.-
    if db.bind.url.drivername == 'sqlite':
        # Create the new hacky table and copy everything across,
        # leaving the boolean columns behind.
        User_vR2.__table__.create(db.bind)
        db.commit()

        new_user_table = inspect_table(metadata, "rename__users")
        replace_table_hack(db, user_table, new_user_table)
    else:
        # Other backends can drop the boolean columns directly.
        wcn_column = user_table.columns["wants_comment_notification"]
        wcn_column.drop()

        wants_notifications_column = user_table.columns["wants_notifications"]
        wants_notifications_column.drop()

    db.commit()
@RegisterMigration(35, MIGRATIONS)
def federation_media_entry(db):
    """Add federation fields (public_id, remote, updated) to MediaEntry."""
    metadata = MetaData(bind=db.bind)
    media_entry_table = inspect_table(metadata, "core__media_entries")

    # Add new fields
    public_id_column = Column(
        "public_id",
        Unicode,
        unique=True,
        nullable=True
    )
    public_id_column.create(
        media_entry_table,
        unique_name="media_public_id"
    )

    remote_column = Column(
        "remote",
        Boolean,
        default=False
    )
    remote_column.create(media_entry_table)

    updated_column = Column(
        "updated",
        DateTime,
        default=datetime.datetime.utcnow,
    )
    updated_column.create(media_entry_table)

    # Data migration: seed `updated` from `created` and mark all
    # pre-existing entries as local (non-remote).
    for entry in db.execute(media_entry_table.select()):
        db.execute(media_entry_table.update().values(
            updated=entry.created,
            remote=False
        ).where(media_entry_table.c.id==entry.id))

    db.commit()
1622 @RegisterMigration(36, MIGRATIONS
)
1623 def create_oauth1_dummies(db
):
1625 Creates a dummy client, request and access tokens.
1627 This is used when invalid data is submitted but real clients and
1628 access tokens. The use of dummy objects prevents timing attacks.
1630 metadata
= MetaData(bind
=db
.bind
)
1631 client_table
= inspect_table(metadata
, "core__clients")
1632 request_token_table
= inspect_table(metadata
, "core__request_tokens")
1633 access_token_table
= inspect_table(metadata
, "core__access_tokens")
1635 # Whilst we don't rely on the secret key being unique or unknown to prevent
1636 # unauthorized clients from using it to authenticate, we still as an extra
1637 # layer of protection created a cryptographically secure key individual to
1638 # each instance that should never be able to be known.
1639 client_secret
= crypto
.random_string(50)
1640 request_token_secret
= crypto
.random_string(50)
1641 request_token_verifier
= crypto
.random_string(50)
1642 access_token_secret
= crypto
.random_string(50)
1644 # Dummy created/updated datetime object
1645 epoc_datetime
= datetime
.datetime
.fromtimestamp(0)
1647 # Create the dummy Client
1648 db
.execute(client_table
.insert().values(
1649 id=oauth
.DUMMY_CLIENT_ID
,
1650 secret
=client_secret
,
1651 application_type
="dummy",
1652 created
=epoc_datetime
,
1653 updated
=epoc_datetime
1656 # Create the dummy RequestToken
1657 db
.execute(request_token_table
.insert().values(
1658 token
=oauth
.DUMMY_REQUEST_TOKEN
,
1659 secret
=request_token_secret
,
1660 client
=oauth
.DUMMY_CLIENT_ID
,
1661 verifier
=request_token_verifier
,
1662 created
=epoc_datetime
,
1663 updated
=epoc_datetime
,
1667 # Create the dummy AccessToken
1668 db
.execute(access_token_table
.insert().values(
1669 token
=oauth
.DUMMY_ACCESS_TOKEN
,
1670 secret
=access_token_secret
,
1671 request_token
=oauth
.DUMMY_REQUEST_TOKEN
,
1672 created
=epoc_datetime
,
1673 updated
=epoc_datetime
1676 # Commit the changes