1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
                        Integer, Unicode, UnicodeText, DateTime,
                        ForeignKey, Date, Index)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
from sqlalchemy.schema import UniqueConstraint

# NOTE: this import appeared twice in the original (once here, once after
# the migration_tools import); the duplicate has been removed.
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
    RegisterMigration, inspect_table, replace_table_hack)
from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
                                   Privilege, Generator, GenericForeignKey)
@RegisterMigration(1, MIGRATIONS)
def ogg_to_webm_audio(db_conn):
    """Rename the 'ogg' media-file keyname to 'webm_audio'."""
    metadata = MetaData(bind=db_conn.bind)

    file_keynames = Table('core__file_keynames', metadata, autoload=True,
                          autoload_with=db_conn.bind)

    # NOTE(review): the db_conn.execute(...) wrapper and trailing commit were
    # lost in the mangled source; restored so the UPDATE actually runs.
    db_conn.execute(
        file_keynames.update().where(file_keynames.c.name == 'ogg').
        values(name='webm_audio'))

    db_conn.commit()
@RegisterMigration(2, MIGRATIONS)
def add_wants_notification_column(db_conn):
    """Add User.wants_comment_notification (Boolean, default True)."""
    metadata = MetaData(bind=db_conn.bind)

    users = Table('core__users', metadata, autoload=True,
                  autoload_with=db_conn.bind)

    col = Column('wants_comment_notification', Boolean,
                 default=True, nullable=True)
    col.create(users, populate_defaults=True)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db_conn.commit()
@RegisterMigration(3, MIGRATIONS)
def add_transcoding_progress(db_conn):
    """Add MediaEntry.transcoding_progress (SmallInteger)."""
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('transcoding_progress', SmallInteger)
    col.create(media_entry)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db_conn.commit()
class Collection_v0(declarative_base()):
    """Snapshot of the Collection model as used by migration 4."""
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    # NOTE(review): the continuation line ending this Column call was lost
    # in the mangled source; `index=True)` restored from upstream history.
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    items = Column(Integer, default=0)
class CollectionItem_v0(declarative_base()):
    """Snapshot of the CollectionItem model as used by migration 4."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    ## This should be activated, normally.
    ## But this would change the way the next migration used to work.
    ## So it's commented for now.
    # NOTE(review): the `__table_args__ = (` opener and closing `{})` were
    # lost in the mangled source; restored from upstream history.
    __table_args__ = (
        UniqueConstraint('collection', 'media_entry'),
        {})

# Tracks whether migration 4 created the tables in this run (and thus the
# unique constraint already exists), so migration 7 can skip re-adding it.
collectionitem_unique_constraint_done = False
@RegisterMigration(4, MIGRATIONS)
def add_collection_tables(db_conn):
    """Create the collection tables and flag that the constraint exists."""
    Collection_v0.__table__.create(db_conn.bind)
    CollectionItem_v0.__table__.create(db_conn.bind)

    global collectionitem_unique_constraint_done
    collectionitem_unique_constraint_done = True

    # NOTE(review): trailing commit restored (missing in mangled source).
    db_conn.commit()
@RegisterMigration(5, MIGRATIONS)
def add_mediaentry_collected(db_conn):
    """Add MediaEntry.collected counter (Integer, default 0)."""
    metadata = MetaData(bind=db_conn.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('collected', Integer, default=0)
    col.create(media_entry)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db_conn.commit()
class ProcessingMetaData_v0(declarative_base()):
    """Snapshot of the ProcessingMetaData model as used by migration 6."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    # NOTE(review): the `index=True)` continuation was lost in the mangled
    # source; restored from upstream history.
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    callback_url = Column(Unicode)
@RegisterMigration(6, MIGRATIONS)
def create_processing_metadata_table(db):
    """Create the core__processing_metadata table."""
    ProcessingMetaData_v0.__table__.create(db.bind)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
# Okay, problem being:
# Migration #4 forgot to add the uniqueconstraint for the
# new tables. While creating the tables from scratch had
# the constraint enabled.
#
# So we have four situations that should end up at the same
# db layout:
#
# 1. Fresh install.
#    Well, easy. Just uses the tables in models.py
# 2. Fresh install using a git version just before this migration
#    The tables are all there, the unique constraint is also there.
#    This migration should do nothing.
#    But as we can't detect the uniqueconstraint easily,
#    this migration just adds the constraint again.
#    And possibly fails very loud. But ignores the failure.
# 3. old install, not using git, just releases.
#    This one will get the new tables in #4 (now with constraint!)
#    And this migration is just skipped silently.
# 4. old install, always on latest git.
#    This one has the tables, but lacks the constraint.
#    So this migration adds the constraint.
@RegisterMigration(7, MIGRATIONS)
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""

    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset it. Maybe the whole thing gets run again
        # For a different db?
        collectionitem_unique_constraint_done = False
        # NOTE(review): early return restored (missing in mangled source) —
        # migration 4 already created the constraint in this run.
        return

    metadata = MetaData(bind=db_conn.bind)

    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint(
        'collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    # NOTE(review): the try/create/pass scaffolding was lost in the mangled
    # source; restored from upstream history.
    try:
        constraint.create()
    except ProgrammingError:
        # User probably has an install that was run since the
        # collection tables were added, so we don't need to run this migration.
        pass

    db_conn.commit()
@RegisterMigration(8, MIGRATIONS)
def add_license_preference(db):
    """Add User.license_preference (Unicode)."""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')

    col = Column('license_preference', Unicode)
    col.create(user_table)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(9, MIGRATIONS)
def mediaentry_new_slug_era(db):
    """
    Update for the new era for media type slugs.

    Entries without slugs now display differently in the url like:
      /u/cwebber/m/id=251/

    ... because of this, we should back-convert:
    - entries without slugs should be converted to use the id, if possible, to
      make old urls still work
    - slugs with = (or also : which is now also not allowed) to have those
      stripped out (small possibility of breakage here sadly)
    """

    def slug_and_user_combo_exists(slug, uploader):
        # True if this uploader already has a row with this slug.
        # NOTE(review): the `db.execute(media_table.select(` opener was lost
        # in the mangled source; restored from upstream history.
        return db.execute(
            media_table.select(
                and_(media_table.c.uploader == uploader,
                     media_table.c.slug == slug))).first() is not None

    def append_garbage_till_unique(row, new_slug):
        """
        Attach junk to this row until it's unique, then save it
        """
        if slug_and_user_combo_exists(new_slug, row.uploader):
            # okay, still no success;
            # let's whack junk on there till it's unique.
            new_slug += '-' + uuid.uuid4().hex[:4]
            # keep going if necessary!
            while slug_and_user_combo_exists(new_slug, row.uploader):
                new_slug += uuid.uuid4().hex[:4]

        db.execute(
            media_table.update().
            where(media_table.c.id == row.id).
            values(slug=new_slug))

    metadata = MetaData(bind=db.bind)

    media_table = inspect_table(metadata, 'core__media_entries')

    for row in db.execute(media_table.select()):
        # no slug, try setting to an id
        if not row.slug:
            append_garbage_till_unique(row, six.text_type(row.id))
        # has "=" or ":" in it... we're getting rid of those
        elif u"=" in row.slug or u":" in row.slug:
            append_garbage_till_unique(
                row, row.slug.replace(u"=", u"-").replace(u":", u"-"))

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(10, MIGRATIONS)
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")

    # NOTE(review): these two initializers were lost in the mangled source;
    # restored from upstream history.
    existing_slugs = {}   # creator id -> list of slugs already seen
    slugs_to_change = []  # row ids whose slug collides and must be replaced

    for row in db.execute(collection_table.select()):
        # if duplicate slug, generate a unique slug
        if row.creator in existing_slugs and row.slug in \
           existing_slugs[row.creator]:
            slugs_to_change.append(row.id)
        else:
            if not row.creator in existing_slugs:
                existing_slugs[row.creator] = [row.slug]
            else:
                existing_slugs[row.creator].append(row.slug)

    for row_id in slugs_to_change:
        new_slug = six.text_type(uuid.uuid4())
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))

    # sqlite does not like to change the schema when a transaction(update) is
    # not yet committed, so commit before adding the constraint.
    db.commit()

    constraint = UniqueConstraint('creator', 'slug',
                                  name='core__collection_creator_slug_key',
                                  table=collection_table)
    # NOTE(review): constraint.create() and the final commit were lost in the
    # mangled source; restored from upstream history.
    constraint.create()

    db.commit()
@RegisterMigration(11, MIGRATIONS)
def drop_token_related_User_columns(db):
    """
    Drop unneeded columns from the User table after switching to using
    itsdangerous tokens for email and forgot password verification.
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    verification_key = user_table.columns['verification_key']
    fp_verification_key = user_table.columns['fp_verification_key']
    fp_token_expire = user_table.columns['fp_token_expire']

    verification_key.drop()
    fp_verification_key.drop()
    fp_token_expire.drop()

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
class CommentSubscription_v0(declarative_base()):
    """Snapshot of the CommentSubscription model as used by migration 12."""
    __tablename__ = 'core__comment_subscriptions'

    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)

    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)
class Notification_v0(declarative_base()):
    """Snapshot of the polymorphic Notification base for migration 12."""
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # NOTE(review): the `index=True)` continuation was lost in the mangled
    # source; restored from upstream history.
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
class CommentNotification_v0(Notification_v0):
    """Notification subtype pointing at a MediaComment."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
class ProcessingNotification_v0(Notification_v0):
    """Notification subtype pointing at a MediaEntry being processed."""
    __tablename__ = 'core__processing_notifications'
    id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
@RegisterMigration(12, MIGRATIONS)
def add_new_notification_tables(db):
    """Create the comment-subscription and notification tables."""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    mediaentry_table = inspect_table(metadata, 'core__media_entries')
    mediacomment_table = inspect_table(metadata, 'core__media_comments')

    CommentSubscription_v0.__table__.create(db.bind)

    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(13, MIGRATIONS)
def pw_hash_nullable(db):
    """Make pw_hash column nullable"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    user_table.c.pw_hash.alter(nullable=True)

    # sqlite+sqlalchemy seems to drop this constraint during the
    # migration, so we add it back here for now a bit manually.
    if db.bind.url.drivername == 'sqlite':
        constraint = UniqueConstraint('username', table=user_table)
        # NOTE(review): constraint.create() restored (missing in mangled
        # source) — without it the constraint object is never applied.
        constraint.create()

    db.commit()
class Client_v0(declarative_base()):
    """
        Model representing a client - Used for API Auth
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    expirey = Column(DateTime, nullable=True)
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # optional stuff
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    # NOTE(review): the `def __repr__(self):` and `else:` lines were lost in
    # the mangled source; restored from upstream history.
    def __repr__(self):
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
class RequestToken_v0(declarative_base()):
    """
        Model for representing the request tokens
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client_v0.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class AccessToken_v0(declarative_base()):
    """
        Model for representing the access tokens
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class NonceTimestamp_v0(declarative_base()):
    """
        A place the timestamp and nonce can be stored - this is for OAuth1
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
@RegisterMigration(14, MIGRATIONS)
def create_oauth1_tables(db):
    """ Creates the OAuth1 tables """

    Client_v0.__table__.create(db.bind)
    RequestToken_v0.__table__.create(db.bind)
    AccessToken_v0.__table__.create(db.bind)
    NonceTimestamp_v0.__table__.create(db.bind)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    col = Column('wants_notifications', Boolean, default=True)
    col.create(user_table)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(16, MIGRATIONS)
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)

    user_table = inspect_table(metadata, 'core__users')
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    col = Column('uploaded', Integer, default=0)
    col.create(user_table)

    col = Column('upload_limit', Integer)
    col.create(user_table)

    col = Column('file_size', Integer, default=0)
    col.create(media_entry_table)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(17, MIGRATIONS)
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_file_table = inspect_table(metadata, "core__mediafiles")

    col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    col.create(media_file_table)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
class ReportBase_v0(declarative_base()):
    """Polymorphic base for the report tables created in migration 18."""
    __tablename__ = 'core__reports'

    id = Column(Integer, primary_key=True)
    reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
    report_content = Column(UnicodeText)
    reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # Stored in the 'type' column; selects the concrete report subclass.
    discriminator = Column('type', Unicode(50))
    resolver_id = Column(Integer, ForeignKey(User.id))
    resolved = Column(DateTime)
    result = Column(UnicodeText)

    __mapper_args__ = {'polymorphic_on': discriminator}
class CommentReport_v0(ReportBase_v0):
    """Report subtype targeting a MediaComment."""
    __tablename__ = 'core__reports_on_comments'
    __mapper_args__ = {'polymorphic_identity': 'comment_report'}

    # NOTE(review): the `primary_key=True)` continuation was lost in the
    # mangled source; restored from upstream history.
    id = Column('id', Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
class MediaReport_v0(ReportBase_v0):
    """Report subtype targeting a MediaEntry."""
    __tablename__ = 'core__reports_on_media'
    __mapper_args__ = {'polymorphic_identity': 'media_report'}

    id = Column('id', Integer, ForeignKey('core__reports.id'),
                primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
class UserBan_v0(declarative_base()):
    """Snapshot of the UserBan model as used by migration 18."""
    __tablename__ = 'core__user_bans'

    # NOTE(review): the `primary_key=True)` continuation was lost in the
    # mangled source; restored from upstream history.
    user_id = Column(Integer, ForeignKey(User.id), nullable=False,
                     primary_key=True)
    expiration_date = Column(Date)
    reason = Column(UnicodeText, nullable=False)
class Privilege_v0(declarative_base()):
    """Snapshot of the Privilege model as used by migration 18."""
    __tablename__ = 'core__privileges'

    id = Column(Integer, nullable=False, primary_key=True, unique=True)
    privilege_name = Column(Unicode, nullable=False, unique=True)
class PrivilegeUserAssociation_v0(declarative_base()):
    """
    Association table between users and privileges as (mis)defined at
    migration 18.

    NOTE(review): the column bodies were mostly lost in the mangled source
    and are restored from upstream history.  The foreign keys below really
    were cross-wired ('core__privilege_id' references User.id and
    'core__user_id' references Privilege.id) -- migration 21 exists to fix
    exactly this, so do not "correct" it here.
    """
    __tablename__ = 'core__privileges_users'

    privilege_id = Column(
        'core__privilege_id',
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    user_id = Column(
        'core__user_id',
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
# Privilege names seeded by migration 18; mirrors the FOUNDATIONS data a
# fresh install would create.
PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name': u'admin'},
                            {'privilege_name': u'moderator'},
                            {'privilege_name': u'uploader'},
                            {'privilege_name': u'reporter'},
                            {'privilege_name': u'commenter'},
                            {'privilege_name': u'active'}]
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.
class User_vR1(declarative_base()):
    """Replacement User table used for the sqlite drop-column workaround."""
    __tablename__ = 'rename__users'

    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    url = Column(Unicode)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)
@RegisterMigration(18, MIGRATIONS)
def create_moderation_tables(db):
    """Create the report/ban/privilege tables and seed them from old data."""

    # First, we will create the new tables in the database.
    #--------------------------------------------------------------------------
    ReportBase_v0.__table__.create(db.bind)
    CommentReport_v0.__table__.create(db.bind)
    MediaReport_v0.__table__.create(db.bind)
    UserBan_v0.__table__.create(db.bind)
    Privilege_v0.__table__.create(db.bind)
    PrivilegeUserAssociation_v0.__table__.create(db.bind)

    # NOTE(review): this commit was lost in the mangled source; restored.
    db.commit()

    # Then initialize the tables that we will later use
    #--------------------------------------------------------------------------
    metadata = MetaData(bind=db.bind)
    privileges_table = inspect_table(metadata, "core__privileges")
    user_table = inspect_table(metadata, 'core__users')
    user_privilege_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This section initializes the default Privilege foundations, that
    # would be created through the FOUNDATIONS system in a new instance
    #--------------------------------------------------------------------------
    for parameters in PRIVILEGE_FOUNDATIONS_v0:
        db.execute(privileges_table.insert().values(**parameters))

    db.commit()

    # This next section takes the information from the old is_admin and status
    # columns and converts those to the new privilege system
    #--------------------------------------------------------------------------
    # NOTE(review): the three db.execute( wrappers were lost in the mangled
    # source; restored from upstream history.
    admin_users_ids, active_users_ids, inactive_users_ids = (
        db.execute(
            user_table.select().where(
                user_table.c.is_admin == True)).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin == False).where(
                user_table.c.status == u"active")).fetchall(),
        db.execute(
            user_table.select().where(
                user_table.c.is_admin == False).where(
                user_table.c.status != u"active")).fetchall())

    # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
    (admin_privilege_id, uploader_privilege_id,
     reporter_privilege_id, commenter_privilege_id,
     active_privilege_id) = [
        db.execute(privileges_table.select().where(
            privileges_table.c.privilege_name == privilege_name)).first()['id']
        for privilege_name in
        [u"admin", u"uploader", u"reporter", u"commenter", u"active"]
    ]

    # Give each user the appopriate privileges depending whether they are an
    # admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    #
    # NOTE(review): the kwargs below look swapped (user id written to
    # core__privilege_id and vice versa) but this matches the cross-wired
    # column definitions of PrivilegeUserAssociation_v0; migration 21
    # straightens this out.  Do not "fix" it here.
    for admin_user in admin_users_ids:
        admin_user_id = admin_user['id']
        for privilege_id in [admin_privilege_id, uploader_privilege_id,
                             reporter_privilege_id, commenter_privilege_id,
                             active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=admin_user_id,
                core__user_id=privilege_id))

    for active_user in active_users_ids:
        active_user_id = active_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id, active_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=active_user_id,
                core__user_id=privilege_id))

    for inactive_user in inactive_users_ids:
        inactive_user_id = inactive_user['id']
        for privilege_id in [uploader_privilege_id, reporter_privilege_id,
                             commenter_privilege_id]:
            db.execute(user_privilege_assoc.insert().values(
                core__privilege_id=inactive_user_id,
                core__user_id=privilege_id))

    db.commit()

    # And then, once the information is taken from is_admin & status columns
    # we drop all of the vestigial columns from the User table.
    #--------------------------------------------------------------------------
    if db.bind.url.drivername == 'sqlite':
        # SQLite has some issues that make it *impossible* to drop boolean
        # columns. So, the following code is a very hacky workaround which
        # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, this process is much simpler ~~~~~

        status = user_table.columns['status']
        email_verified = user_table.columns['email_verified']
        is_admin = user_table.columns['is_admin']
        # NOTE(review): only email_verified.drop() survived the mangled
        # source; the other two drops restored from upstream history.
        status.drop()
        email_verified.drop()
        is_admin.drop()

    db.commit()
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
    """
    Drop unused MediaEntry.collected column
    """
    metadata = MetaData(bind=db.bind)

    # Original code reused one name for the table and then the column;
    # split into two names for clarity.
    media_entries = inspect_table(metadata, 'core__media_entries')
    collected_col = media_entries.columns['collected']

    collected_col.drop()

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
@RegisterMigration(20, MIGRATIONS)
def add_metadata_column(db):
    """Add MediaEntry.media_metadata (mutable JSON-encoded dict)."""
    metadata = MetaData(bind=db.bind)

    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
                 default=MutationDict())
    col.create(media_entry)

    # NOTE(review): trailing commit restored (missing in mangled source).
    db.commit()
class PrivilegeUserAssociation_R1(declarative_base()):
    """
    Corrected privileges<->users association used by migration 21.

    NOTE(review): most column lines were lost in the mangled source;
    restored from upstream history ('user' references User.id, 'privilege'
    references Privilege.id).
    """
    __tablename__ = 'rename__privileges_users'

    user = Column(
        "user",
        Integer,
        ForeignKey(User.id),
        primary_key=True)
    privilege = Column(
        "privilege",
        Integer,
        ForeignKey(Privilege.id),
        primary_key=True)
@RegisterMigration(21, MIGRATIONS)
def fix_privilege_user_association_table(db):
    """
    There was an error in the PrivilegeUserAssociation table that allowed for a
    dangerous sql error. We need to the change the name of the columns to be
    unique, and properly referenced.
    """
    metadata = MetaData(bind=db.bind)

    privilege_user_assoc = inspect_table(
        metadata, 'core__privileges_users')

    # This whole process is more complex if we're dealing with sqlite
    if db.bind.url.drivername == 'sqlite':
        PrivilegeUserAssociation_R1.__table__.create(db.bind)
        db.commit()

        new_privilege_user_assoc = inspect_table(
            metadata, 'rename__privileges_users')
        result = db.execute(privilege_user_assoc.select())
        # NOTE(review): the loop header and insert values were lost in the
        # mangled source; restored from upstream history.
        for row in result:
            # The columns were improperly named before, so we switch the columns
            user_id, priv_id = row['core__privilege_id'], row['core__user_id']
            db.execute(new_privilege_user_assoc.insert().values(
                user=user_id,
                privilege=priv_id))

        db.commit()

        privilege_user_assoc.drop()
        new_privilege_user_assoc.rename('core__privileges_users')

    # much simpler if postgres though!
    else:
        privilege_user_assoc.c.core__user_id.alter(name="privilege")
        privilege_user_assoc.c.core__privilege_id.alter(name="user")

    db.commit()
@RegisterMigration(22, MIGRATIONS)
def add_index_username_field(db):
    """
    This migration has been found to be doing the wrong thing.  See
    the documentation in migration 23 (revert_username_index) below
    which undoes this for those databases that did run this migration.

    Old description:
      This indexes the User.username field which is frequently queried
      for example a user logging in. This solves the issue #894
    """
    ## This code is left commented out *on purpose!*
    ##
    ## We do not normally allow commented out code like this in
    ## MediaGoblin but this is a special case: since this migration has
    ## been nullified but with great work to set things back below,
    ## this is commented out for historical clarity.
    #
    # metadata = MetaData(bind=db.bind)
    # user_table = inspect_table(metadata, "core__users")
    #
    # new_index = Index("ix_core__users_uploader", user_table.c.username)
    # new_index.create()
    #
    # db.commit()

    # NOTE(review): the no-op `pass` body was lost in the mangled source;
    # restored so the nullified migration remains syntactically valid.
    pass
@RegisterMigration(23, MIGRATIONS)
def revert_username_index(db):
    """
    Revert the stuff we did in migration 22 above.

    There were a couple of problems with what we did:
     - There was never a need for this migration!  The unique
       constraint had an implicit b-tree index, so it wasn't really
       needed.  (This is my (Chris Webber's) fault for suggesting it
       needed to happen without knowing what's going on... my bad!)
     - On top of that, databases created after the models.py was
       changed weren't the same as those that had been run through
       migration 22 above.

    As such, we're setting things back to the way they were before,
    but as it turns out, that's tricky to do!
    """
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    indexes = dict(
        [(index.name, index) for index in user_table.indexes])

    # index from unnecessary migration
    users_uploader_index = indexes.get(u'ix_core__users_uploader')
    # index created from models.py after (unique=True, index=True)
    # was set in models.py
    users_username_index = indexes.get(u'ix_core__users_username')

    if users_uploader_index is None and users_username_index is None:
        # We don't need to do anything.
        # The database isn't in a state where it needs fixing
        #
        # (ie, either went through the previous borked migration or
        #  was initialized with a models.py where core__users was both
        #  unique=True and index=True)
        # NOTE(review): early return restored (missing in mangled source).
        return

    if db.bind.url.drivername == 'sqlite':
        # Again, sqlite has problems.  So this is tricky.

        # Yes, this is correct to use User_vR1!  Nothing has changed
        # between the *correct* version of this table and migration 18.
        User_vR1.__table__.create(db.bind)
        db.commit()
        new_user_table = inspect_table(metadata, 'rename__users')
        replace_table_hack(db, user_table, new_user_table)
    else:
        # If the db is not run using SQLite, we don't need to do crazy
        # table copying.

        # Remove whichever of the not-used indexes are in place
        if users_uploader_index is not None:
            users_uploader_index.drop()
        if users_username_index is not None:
            users_username_index.drop()

        # Given we're removing indexes then adding a unique constraint
        # which *we know might fail*, thus probably rolling back the
        # session, let's commit here.
        db.commit()

        # NOTE(review): try/create/rollback scaffolding restored from
        # upstream history (lost in the mangled source).
        try:
            # Add the unique constraint
            constraint = UniqueConstraint(
                'username', table=user_table)
            constraint.create()
        except ProgrammingError:
            # constraint already exists, no need to add
            db.rollback()

    db.commit()
class Generator_R0(declarative_base()):
    """Snapshot of the Generator model as used by migration 24."""
    __tablename__ = "core__generators"

    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    object_type = Column(Unicode, nullable=False)
class ActivityIntermediator_R0(declarative_base()):
    """Snapshot of the ActivityIntermediator model for migration 24."""
    __tablename__ = "core__activity_intermediators"
    id = Column(Integer, primary_key=True)
    type = Column(Unicode, nullable=False)

    # These are needed for migration 29
    # NOTE(review): the dict opener and the "user"/"media" entries were lost
    # in the mangled source; restored from upstream history.
    TYPES = {
        "user": User,
        "media": MediaEntry,
        "comment": MediaComment,
        "collection": Collection,
    }
class Activity_R0(declarative_base()):
    """Snapshot of the Activity model as used by migration 24."""
    __tablename__ = "core__activities"

    id = Column(Integer, primary_key=True)
    actor = Column(Integer, ForeignKey(User.id), nullable=False)
    published = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
    verb = Column(Unicode, nullable=False)
    content = Column(Unicode, nullable=True)
    title = Column(Unicode, nullable=True)
    generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
    # NOTE(review): the `nullable=...)` continuations for object/target were
    # lost in the mangled source; restored from upstream history (object is
    # required, target optional).
    object = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=False)
    target = Column(Integer,
                    ForeignKey(ActivityIntermediator_R0.id),
                    nullable=True)
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
    """
    Creates everything to create activities in GMG

    - Adds Activity, ActivityIntermediator and Generator table
    - Creates GMG service generator for activities produced by the server
    - Adds the activity foreign key column to objects/targets
    - Retroactively adds activities for what we can accurately work out
    """
    # Set constants we'll use later
    FOREIGN_KEY = "core__activity_intermediators.id"
    ACTIVITY_COLUMN = "activity"

    # Create the new tables.
    ActivityIntermediator_R0.__table__.create(db.bind)
    Generator_R0.__table__.create(db.bind)
    Activity_R0.__table__.create(db.bind)
    db.commit()

    # Initiate the tables we want to use later
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    activity_table = inspect_table(metadata, "core__activities")
    generator_table = inspect_table(metadata, "core__generators")
    collection_table = inspect_table(metadata, "core__collections")
    media_entry_table = inspect_table(metadata, "core__media_entries")
    media_comments_table = inspect_table(metadata, "core__media_comments")
    ai_table = inspect_table(metadata, "core__activity_intermediators")

    # Create the foundations for Generator: a "service" generator which
    # represents activities produced by this server itself.
    db.execute(generator_table.insert().values(
        name="GNU Mediagoblin",
        object_type="service",
        published=datetime.datetime.now(),
        updated=datetime.datetime.now()
    ))
    db.commit()

    # Get the ID of that generator
    gmg_generator = db.execute(generator_table.select(
        generator_table.c.name==u"GNU Mediagoblin")).first()

    # Now we want to modify the tables which MAY have an activity at some point
    media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    media_col.create(media_entry_table)

    user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    user_col.create(user_table)

    comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    comments_col.create(media_comments_table)

    collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
    collection_col.create(collection_table)
    db.commit()

    # Now we want to retroactively add what activities we can
    # first we'll add activities when people uploaded media.
    # these can't have content as it's not feasible to get the
    # correct content strings.
    for media in db.execute(media_entry_table.select()):
        # Now we want to create the intermediatory
        # NOTE(review): the "type" value must match a key of
        # ActivityIntermediator_R0.TYPES — confirm against the model.
        db_ai = db.execute(ai_table.insert().values(
            type="media",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Add the activity
        activity = {
            "verb": "create",
            "actor": media.uploader,
            "published": media.created,
            "updated": media.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }
        db.execute(activity_table.insert().values(**activity))

        # Add the AI to the media.
        db.execute(media_entry_table.update().values(
            activity=db_ai.id
        ).where(media_entry_table.c.id==media.id))

    # Now we want to add all the comments people made
    for comment in db.execute(media_comments_table.select()):
        # Get the MediaEntry for the comment
        media_entry = db.execute(
            media_entry_table.select(
                media_entry_table.c.id==comment.media_entry
        )).first()

        # Create an AI for target (the media's own AI, set in the loop above)
        db_ai_media = db.execute(ai_table.select(
            ai_table.c.id==media_entry.activity
        )).first().id

        # BUG FIX: this update previously filtered on media_entry.id, which
        # keyed the write to an unrelated comment row; the comment's own row
        # is keyed by comment.id (as in the update further below).
        db.execute(
            media_comments_table.update().values(
                activity=db_ai_media
        ).where(media_comments_table.c.id==comment.id))

        # Now create the AI for the comment
        db_ai_comment = db.execute(ai_table.insert().values(
            type="comment",
        )).inserted_primary_key[0]

        activity = {
            "verb": "comment",
            "actor": comment.author,
            "published": comment.created,
            "updated": comment.created,
            "generator": gmg_generator.id,
            "object": db_ai_comment,
            "target": db_ai_media,
        }

        # Now add the comment object
        db.execute(activity_table.insert().values(**activity))

        # Now add activity to comment
        db.execute(media_comments_table.update().values(
            activity=db_ai_comment
        ).where(media_comments_table.c.id==comment.id))

    # Create 'create' activities for all collections
    for collection in db.execute(collection_table.select()):
        # Create the intermediatory for the collection
        db_ai = db.execute(ai_table.insert().values(
            type="collection",
        ))
        db_ai = db.execute(ai_table.select(
            ai_table.c.id==db_ai.inserted_primary_key[0]
        )).first()

        # Now add link the collection to the AI
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

        activity = {
            "verb": "create",
            "actor": collection.creator,
            "published": collection.created,
            "updated": collection.created,
            "generator": gmg_generator.id,
            "object": db_ai.id,
        }
        db.execute(activity_table.insert().values(**activity))

        # Now add the activity to the collection
        db.execute(collection_table.update().values(
            activity=db_ai.id
        ).where(collection_table.c.id==collection.id))

    db.commit()
class Location_V0(declarative_base()):
    """Versioned snapshot of the Location model for migration 25."""
    __tablename__ = "core__locations"
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Human-readable name of the location.
    name = Column(Unicode)
    # JSON blobs (mutation-tracked) — presumably geo position and postal
    # address data; exact schema not visible here, confirm against callers.
    position = Column(MutationDict.as_mutable(JSONEncoded))
    address = Column(MutationDict.as_mutable(JSONEncoded))
@RegisterMigration(25, MIGRATIONS)
def add_location_model(db):
    """
    Add location model.

    Creates the core__locations table and adds a nullable "location"
    foreign-key column to User, Collection, MediaEntry and MediaComment.
    """
    metadata = MetaData(bind=db.bind)

    # Create location table
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Inspect the tables we need
    user = inspect_table(metadata, "core__users")
    collections = inspect_table(metadata, "core__collections")
    media_entry = inspect_table(metadata, "core__media_entries")
    media_comments = inspect_table(metadata, "core__media_comments")

    # Now add location support to the various models. A fresh Column object
    # is built per table — sqlalchemy-migrate binds a Column to the table it
    # is created on, so one instance cannot be reused.
    for table in (user, collections, media_entry, media_comments):
        col = Column("location", Integer, ForeignKey(Location_V0.id))
        col.create(table)

    db.commit()
@RegisterMigration(26, MIGRATIONS)
def datetime_to_utc(db):
    """
    Convert datetime stamps to UTC.

    Timestamps were stored naive in the server's local timezone; this walks
    every timestamped row, attaches the local timezone and rewrites the value
    as UTC.
    """
    # Get the server's timezone, this is what the database has stored
    server_timezone = dateutil.tz.tzlocal()

    metadata = MetaData(bind=db.bind)

    def dt_to_utc(dt):
        # Add the current timezone to the naive value, then convert to UTC.
        dt = dt.replace(tzinfo=server_timezone)
        return dt.astimezone(pytz.UTC)

    # (table name, key column used in the WHERE clause, timestamp columns).
    # The token tables have no integer id, so they key on "token"; every
    # other table keys on its primary "id" column.
    conversions = [
        ("core__users", "id", ("created",)),
        ("core__clients", "id", ("created", "updated")),
        ("core__request_tokens", "token", ("created", "updated")),
        ("core__access_tokens", "token", ("created", "updated")),
        ("core__media_entries", "id", ("created",)),
        ("core__attachment_files", "id", ("created",)),
        ("core__media_comments", "id", ("created",)),
        ("core__collections", "id", ("created",)),
        ("core__collection_items", "id", ("added",)),
        ("core__comment_subscriptions", "id", ("created",)),
        ("core__notifications", "id", ("created",)),
        ("core__reports", "id", ("created",)),
        ("core__generators", "id", ("published", "updated")),
        ("core__activities", "id", ("published", "updated")),
    ]

    # Look up all the timestamps and convert them to UTC, one UPDATE per row
    # (same behaviour as the original hand-unrolled per-table loops).
    for table_name, key_name, columns in conversions:
        table = inspect_table(metadata, table_name)
        key_column = table.c[key_name]
        for row in db.execute(table.select()):
            db.execute(table.update().values(
                **{col: dt_to_utc(getattr(row, col)) for col in columns}
            ).where(key_column == getattr(row, key_name)))

    # Commit this to the database
    db.commit()
# Migrations to handle moving from the activity-specific foreign keys to the
# new GenericForeignKey implementation. They have been split up to improve
# readability and minimise errors.
class GenericModelReference_V0(declarative_base()):
    """Versioned snapshot of the GenericModelReference model (migration 27).

    One row points at an arbitrary model instance by storing the target
    table's name and the target row's primary key.
    """
    __tablename__ = "core__generic_model_reference"

    id = Column(Integer, primary_key=True)
    # Primary key of the referenced row.
    obj_pk = Column(Integer, nullable=False)
    # Identifies which model/table the referenced row lives in — migration 29
    # stores the model's __tablename__ here.
    model_type = Column(Unicode, nullable=False)
@RegisterMigration(27, MIGRATIONS)
def create_generic_model_reference(db):
    """ Creates the Generic Model Reference table """
    GenericModelReference_V0.__table__.create(db.bind)
    # Persist the new table, as the sibling migrations do.
    db.commit()
@RegisterMigration(28, MIGRATIONS)
def add_foreign_key_fields(db):
    """
    Add the fields for GenericForeignKey to the model under temporary names,
    this is so that later a data migration can occur. They will be renamed to
    the original names.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Create column and add to model. GenericForeignKey() is passed as the
    # column's FK construct; the stored value is the GenericModelReference id.
    object_column = Column("temp_object", Integer, GenericForeignKey())
    object_column.create(activity_table)

    target_column = Column("temp_target", Integer, GenericForeignKey())
    target_column.create(activity_table)

    # Commit this to the database
    db.commit()
@RegisterMigration(29, MIGRATIONS)
def migrate_data_foreign_keys(db):
    """
    This will migrate the data from the old object and target attributes which
    use the old ActivityIntermediator to the new temporary fields which use
    the new GenericForeignKey.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")
    ai_table = inspect_table(metadata, "core__activity_intermediators")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")

    # Iterate through all activities doing the migration per activity.
    for activity in db.execute(activity_table.select()):
        # First do the "Activity.object" migration to "Activity.temp_object"
        # I need to get the object from the Activity, I can't use the old
        # Activity.get_object as we're in a migration.
        object_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.object
        )).first()

        # Resolve the AI's type to its model and reflect that model's table.
        object_ai_type = ActivityIntermediator_R0.TYPES[object_ai.type]
        object_ai_table = inspect_table(metadata, object_ai_type.__tablename__)

        activity_object = db.execute(object_ai_table.select(
            object_ai_table.c.activity==object_ai.id
        )).first()

        # Now we need to create the GenericModelReference
        object_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_object.id,
            model_type=object_ai_type.__tablename__
        ))

        # Now set the ID of the GenericModelReference in the GenericForeignKey
        # (keyed to this activity's row only).
        db.execute(activity_table.update().values(
            temp_object=object_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

        # Now do same process for "Activity.target" to "Activity.temp_target"
        # not all Activities have a target so if it doesn't just skip the rest
        # of this iteration.
        if activity.target is None:
            continue

        # Now get the target for the activity.
        target_ai = db.execute(ai_table.select(
            ai_table.c.id==activity.target
        )).first()

        target_ai_type = ActivityIntermediator_R0.TYPES[target_ai.type]
        target_ai_table = inspect_table(metadata, target_ai_type.__tablename__)

        activity_target = db.execute(target_ai_table.select(
            target_ai_table.c.activity==target_ai.id
        )).first()

        # We now want to create the new target GenericModelReference
        target_gmr = db.execute(gmr_table.insert().values(
            obj_pk=activity_target.id,
            model_type=target_ai_type.__tablename__
        ))

        # BUG FIX: the original wrote the target's GMR id into temp_object,
        # clobbering the object reference set above; it belongs in
        # temp_target.
        db.execute(activity_table.update().values(
            temp_target=target_gmr.inserted_primary_key[0]
        ).where(activity_table.c.id==activity.id))

    # Commit to the database.
    db.commit()
@RegisterMigration(30, MIGRATIONS)
def rename_and_remove_object_and_target(db):
    """
    Renames the new Activity.object and Activity.target fields and removes
    the old ones.
    """
    metadata = MetaData(bind=db.bind)
    activity_table = inspect_table(metadata, "core__activities")

    # Firstly lets remove the old fields — their data was copied into the
    # temp_* columns by migration 29.
    old_object_column = activity_table.columns["object"]
    old_target_column = activity_table.columns["target"]

    old_object_column.drop()
    old_target_column.drop()

    # Now get the new columns.
    new_object_column = activity_table.columns["temp_object"]
    new_target_column = activity_table.columns["temp_target"]

    # Rename them to their permanent names.
    new_object_column.alter(name="object_id")
    new_target_column.alter(name="target_id")

    # Commit the changes to the database.
    db.commit()
1401 @RegisterMigration(31, MIGRATIONS
)
1402 def remove_activityintermediator(db
):
1404 This removes the old specific ActivityIntermediator model which has been
1405 superseeded by the GenericForeignKey field.
1407 metadata
= MetaData(bind
=db
.bind
)
1410 ai_table
= inspect_table(metadata
, "core__activity_intermediators")
1413 # Commit the changes