1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
TODO: indexes on foreignkeys, where useful.
"""
import logging
import datetime

from sqlalchemy import (
    Column, Integer, Unicode, UnicodeText, DateTime, Boolean, ForeignKey,
    UniqueConstraint, PrimaryKeyConstraint, SmallInteger)
from sqlalchemy.orm import relationship, backref, with_polymorphic
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.util import memoized_property

from mediagoblin.db.extratypes import PathTupleWithSlashes, JSONEncoded
from mediagoblin.db.base import Base, DictReadAttrProxy
from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
    MediaCommentMixin, CollectionMixin, CollectionItemMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component

# It's actually kind of annoying how sqlalchemy-migrate does this, if
# I understand it right, but whatever.  Anyway, don't remove this :P
#
# We could do migration calls more manually instead of relying on
# this import-based meddling...
from migrate import changeset
# Module-level logger for db model events (user/media deletion, etc.).
_log = logging.getLogger(__name__)
class User(Base, UserMixin):
    """
    A user account on this MediaGoblin instance.

    TODO: We should consider moving some rarely used fields
    into some sort of "shadow" table.
    """
    __tablename__ = "core__users"

    id = Column(Integer, primary_key=True)
    username = Column(Unicode, nullable=False, unique=True)
    # Note: no db uniqueness constraint on email because it's not
    # reliable (many email systems case insensitive despite against
    # the RFC) and because it would be a mess to implement at this
    # point.
    email = Column(Unicode, nullable=False)
    pw_hash = Column(Unicode)
    email_verified = Column(Boolean, default=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    status = Column(Unicode, default=u"needs_email_verification",
                    nullable=False)
    # Intented to be nullable=False, but migrations would not work for it
    # set to nullable=True implicitly.
    wants_comment_notification = Column(Boolean, default=True)
    wants_notifications = Column(Boolean, default=True)
    license_preference = Column(Unicode)
    is_admin = Column(Boolean, default=False, nullable=False)
    bio = Column(UnicodeText)  # ??
    uploaded = Column(Integer, default=0)
    upload_limit = Column(Integer)

    ## TODO
    # plugin data would be in a separate model

    def __repr__(self):
        """Debug representation, e.g. <User #1 verified admin "joe">."""
        return '<{0} #{1} {2} {3} "{4}">'.format(
                self.__class__.__name__,
                self.id,
                'verified' if self.email_verified else 'non-verified',
                'admin' if self.is_admin else 'user',
                self.username)

    def delete(self, **kwargs):
        """Deletes a User and all related entries/comments/files/..."""
        # Collections get deleted by relationships.

        media_entries = MediaEntry.query.filter(MediaEntry.uploader == self.id)
        for media in media_entries:
            # TODO: Make sure that "MediaEntry.delete()" also deletes
            # all related files/Comments
            media.delete(del_orphan_tags=False, commit=False)

        # Delete now unused tags
        # TODO: import here due to cyclic imports!!! This cries for refactoring
        from mediagoblin.db.util import clean_orphan_tags
        clean_orphan_tags(commit=False)

        # Delete user, pass through commit=False/True in kwargs
        super(User, self).delete(**kwargs)
        _log.info('Deleted user "{0}" account'.format(self.username))
# NOTE(review): the class header was lost in extraction; the name `Client`
# is reconstructed from ForeignKey(Client.id) used by RequestToken below.
class Client(Base):
    """
    Model representing a client - Used for API Auth
    """
    __tablename__ = "core__clients"

    id = Column(Unicode, nullable=True, primary_key=True)
    secret = Column(Unicode, nullable=False)
    expirey = Column(DateTime, nullable=True)  # sic: column name is misspelled
    application_type = Column(Unicode, nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)

    # Optional fields
    redirect_uri = Column(JSONEncoded, nullable=True)
    logo_url = Column(Unicode, nullable=True)
    application_name = Column(Unicode, nullable=True)
    contacts = Column(JSONEncoded, nullable=True)

    def __repr__(self):
        if self.application_name:
            return "<Client {0} - {1}>".format(self.application_name, self.id)
        else:
            return "<Client {0}>".format(self.id)
class RequestToken(Base):
    """
    Model for representing the request tokens
    """
    __tablename__ = "core__request_tokens"

    token = Column(Unicode, primary_key=True)
    secret = Column(Unicode, nullable=False)
    client = Column(Unicode, ForeignKey(Client.id))
    user = Column(Integer, ForeignKey(User.id), nullable=True)
    used = Column(Boolean, default=False)
    authenticated = Column(Boolean, default=False)
    verifier = Column(Unicode, nullable=True)
    # "oob" (out-of-band) is the OAuth convention for no callback URL
    callback = Column(Unicode, nullable=False, default=u"oob")
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class AccessToken(Base):
    """
    Model for representing the access tokens
    """
    __tablename__ = "core__access_tokens"

    token = Column(Unicode, nullable=False, primary_key=True)
    secret = Column(Unicode, nullable=False)
    user = Column(Integer, ForeignKey(User.id))
    request_token = Column(Unicode, ForeignKey(RequestToken.token))
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
class NonceTimestamp(Base):
    """
    A place the timestamp and nonce can be stored - this is for OAuth1
    """
    __tablename__ = "core__nonce_timestamps"

    nonce = Column(Unicode, nullable=False, primary_key=True)
    timestamp = Column(DateTime, nullable=False, primary_key=True)
class MediaEntry(Base, MediaEntryMixin):
    """
    A piece of media uploaded by a user.

    TODO: Consider fetching the media_files using join
    """
    __tablename__ = "core__media_entries"

    id = Column(Integer, primary_key=True)
    uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)  # ??
    media_type = Column(Unicode, nullable=False)
    state = Column(Unicode, default=u'unprocessed', nullable=False)
        # or use sqlalchemy.types.Enum?
    license = Column(Unicode)
    collected = Column(Integer, default=0)
    file_size = Column(Integer, default=0)

    fail_error = Column(Unicode)
    fail_metadata = Column(JSONEncoded)

    transcoding_progress = Column(SmallInteger)

    queued_media_file = Column(PathTupleWithSlashes)

    queued_task_id = Column(Unicode)

    __table_args__ = (
        UniqueConstraint('uploader', 'slug'),
        {})

    get_uploader = relationship(User)

    media_files_helper = relationship("MediaFile",
        collection_class=attribute_mapped_collection("name"),
        cascade="all, delete-orphan"
        )
    # Dict-like proxy: media_files["thumb"] <-> MediaFile rows
    media_files = association_proxy('media_files_helper', 'file_path',
        creator=lambda k, v: MediaFile(name=k, file_path=v)
        )

    attachment_files_helper = relationship("MediaAttachmentFile",
        cascade="all, delete-orphan",
        order_by="MediaAttachmentFile.created"
        )
    attachment_files = association_proxy("attachment_files_helper", "dict_view",
        creator=lambda v: MediaAttachmentFile(
            name=v["name"], filepath=v["filepath"])
        )

    tags_helper = relationship("MediaTag",
        cascade="all, delete-orphan"  # should be automatically deleted
        )
    tags = association_proxy("tags_helper", "dict_view",
        creator=lambda v: MediaTag(name=v["name"], slug=v["slug"])
        )

    collections_helper = relationship("CollectionItem",
        cascade="all, delete-orphan"
        )
    collections = association_proxy("collections_helper", "in_collection")

    def get_comments(self, ascending=False):
        """Return this entry's comments ordered by creation time.

        :param ascending: oldest-first when True, newest-first otherwise
        """
        order_col = MediaComment.created
        if not ascending:
            order_col = desc(order_col)
        return self.all_comments.order_by(order_col)

    def url_to_prev(self, urlgen):
        """get the next 'newer' entry by this user"""
        media = MediaEntry.query.filter(
            (MediaEntry.uploader == self.uploader)
            & (MediaEntry.state == u'processed')
            & (MediaEntry.id > self.id)).order_by(MediaEntry.id).first()

        if media is not None:
            return media.url_for_self(urlgen)

    def url_to_next(self, urlgen):
        """get the next 'older' entry by this user"""
        media = MediaEntry.query.filter(
            (MediaEntry.uploader == self.uploader)
            & (MediaEntry.state == u'processed')
            & (MediaEntry.id < self.id)).order_by(desc(MediaEntry.id)).first()

        if media is not None:
            return media.url_for_self(urlgen)

    def get_file_metadata(self, file_key, metadata_key=None):
        """
        Return the file_metadata dict of a MediaFile. If metadata_key is given,
        return the value of the key.

        Returns None when no MediaFile matches file_key.
        """
        media_file = MediaFile.query.filter_by(media_entry=self.id,
                                               name=unicode(file_key)).first()

        if media_file:
            if metadata_key:
                return media_file.file_metadata.get(metadata_key, None)

            return media_file.file_metadata

    def set_file_metadata(self, file_key, **kwargs):
        """
        Update the file_metadata of a MediaFile.
        """
        media_file = MediaFile.query.filter_by(media_entry=self.id,
                                               name=unicode(file_key)).first()

        file_metadata = media_file.file_metadata or {}

        for key, value in kwargs.iteritems():
            file_metadata[key] = value

        media_file.file_metadata = file_metadata
        media_file.save()

    @property
    def media_data(self):
        # Resolve the media-type-specific data row via its backref name.
        return getattr(self, self.media_data_ref)

    def media_data_init(self, **kwargs):
        """
        Initialize or update the contents of a media entry's media_data row
        """
        media_data = self.media_data

        if media_data is None:
            # Get the correct table:
            table = import_component(self.media_type + '.models:DATA_MODEL')
            # No media data, so actually add a new one
            media_data = table(**kwargs)
            # Get the relationship set up.
            media_data.get_media_entry = self
        else:
            # Update old media data
            for field, value in kwargs.iteritems():
                setattr(media_data, field, value)

    @memoized_property
    def media_data_ref(self):
        # Backref attribute name is defined by the media-type plugin.
        return import_component(self.media_type + '.models:BACKREF_NAME')

    def __repr__(self):
        # Titles may contain non-ascii; replace for a safe debug string.
        safe_title = self.title.encode('ascii', 'replace')

        return '<{classname} {id}: {title}>'.format(
                classname=self.__class__.__name__,
                id=self.id,
                title=safe_title)

    def delete(self, del_orphan_tags=True, **kwargs):
        """Delete MediaEntry and all related files/attachments/comments

        This will *not* automatically delete unused collections, which
        can remain empty.

        :param del_orphan_tags: True/false if we delete unused Tags too
        :param commit: True/False if this should end the db transaction"""
        # User's CollectionItems are automatically deleted via "cascade".
        # Comments on this Media are deleted by cascade, hopefully.

        # Delete all related files/attachments
        try:
            delete_media_files(self)
        except OSError as error:
            # Returns list of files we failed to delete
            _log.error('No such files from the user "{1}" to delete: '
                       '{0}'.format(str(error), self.get_uploader))
        _log.info('Deleted Media entry id "{0}"'.format(self.id))
        # Related MediaTag's are automatically cleaned, but we might
        # want to clean out unused Tag's too.
        if del_orphan_tags:
            # TODO: Import here due to cyclic imports!!!
            #       This cries for refactoring
            from mediagoblin.db.util import clean_orphan_tags
            clean_orphan_tags(commit=False)
        # pass through commit=False/True in kwargs
        super(MediaEntry, self).delete(**kwargs)
class FileKeynames(Base):
    """
    keywords for various places.
    currently the MediaFile keys
    """
    __tablename__ = "core__file_keynames"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, unique=True)

    def __repr__(self):
        return "<FileKeyname %r: %r>" % (self.id, self.name)

    @classmethod
    def find_or_new(cls, name):
        """Return the existing row for `name`, or an (unsaved) new one."""
        t = cls.query.filter_by(name=name).first()
        if t is not None:
            return t
        return cls(name=name)
class MediaFile(Base):
    """
    One file belonging to a MediaEntry, keyed by name ("thumb", "original"...).

    TODO: Highly consider moving "name" into a new table.
    TODO: Consider preloading said table in software
    """
    __tablename__ = "core__mediafiles"

    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id),
        nullable=False)
    name_id = Column(SmallInteger, ForeignKey(FileKeynames.id), nullable=False)
    file_path = Column(PathTupleWithSlashes)
    file_metadata = Column(JSONEncoded)

    __table_args__ = (
        PrimaryKeyConstraint('media_entry', 'name_id'),
        {})

    def __repr__(self):
        return "<MediaFile %s: %r>" % (self.name, self.file_path)

    # Eager inner join: the keyname row is always wanted with the file.
    name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True)
    name = association_proxy('name_helper', 'name',
        creator=FileKeynames.find_or_new
        )
class MediaAttachmentFile(Base):
    """An auxiliary file attached to a MediaEntry."""
    __tablename__ = "core__attachment_files"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id),
        nullable=False)
    name = Column(Unicode, nullable=False)
    filepath = Column(PathTupleWithSlashes)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    @property
    def dict_view(self):
        """A dict like view on this object"""
        return DictReadAttrProxy(self)
# NOTE(review): the class header was lost in extraction; the name `Tag`
# is reconstructed from ForeignKey(Tag.id) used by MediaTag below.
class Tag(Base):
    """A tag, identified by its unique slug."""
    __tablename__ = "core__tags"

    id = Column(Integer, primary_key=True)
    slug = Column(Unicode, nullable=False, unique=True)

    def __repr__(self):
        return "<Tag %r: %r>" % (self.id, self.slug)

    @classmethod
    def find_or_new(cls, slug):
        """Return the existing row for `slug`, or an (unsaved) new one."""
        t = cls.query.filter_by(slug=slug).first()
        if t is not None:
            return t
        return cls(slug=slug)
class MediaTag(Base):
    """Association of a Tag with a MediaEntry, keeping the display name."""
    __tablename__ = "core__media_tags"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id),
        nullable=False, index=True)
    tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
    name = Column(Unicode)
    # created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    __table_args__ = (
        UniqueConstraint('tag', 'media_entry'),
        {})

    tag_helper = relationship(Tag)
    slug = association_proxy('tag_helper', 'slug',
        creator=Tag.find_or_new
        )

    def __init__(self, name=None, slug=None):
        # NOTE(review): lines of this constructor were lost in extraction;
        # reconstructed as: store the display name, and resolve the slug to
        # a Tag row — confirm against upstream history.
        Base.__init__(self)
        if name:
            self.name = name
        if slug:
            self.tag_helper = Tag.find_or_new(slug)

    @property
    def dict_view(self):
        """A dict like view on this object"""
        return DictReadAttrProxy(self)
class MediaComment(Base, MediaCommentMixin):
    """A comment posted on a MediaEntry."""
    __tablename__ = "core__media_comments"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    author = Column(Integer, ForeignKey(User.id), nullable=False)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    content = Column(UnicodeText, nullable=False)

    # Cascade: Comments are owned by their creator. So do the full thing.
    # lazy=dynamic: People might post a *lot* of comments,
    #     so make the "posted_comments" a query-like thing.
    get_author = relationship(User,
                              backref=backref("posted_comments",
                                              lazy="dynamic",
                                              cascade="all, delete-orphan"))
    get_entry = relationship(MediaEntry,
                             backref=backref("comments",
                                             lazy="dynamic",
                                             cascade="all, delete-orphan"))

    # Cascade: Comments are somewhat owned by their MediaEntry.
    #     So do the full thing.
    # lazy=dynamic: MediaEntries might have many comments,
    #     so make the "all_comments" a query-like thing.
    get_media_entry = relationship(MediaEntry,
                                   backref=backref("all_comments",
                                                   lazy="dynamic",
                                                   cascade="all, delete-orphan"))
class Collection(Base, CollectionMixin):
    """An 'album' or 'set' of media by a user.

    On deletion, contained CollectionItems get automatically reaped via
    SQL cascade."""
    __tablename__ = "core__collections"

    id = Column(Integer, primary_key=True)
    title = Column(Unicode, nullable=False)
    slug = Column(Unicode)
    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
                     index=True)
    description = Column(UnicodeText)
    creator = Column(Integer, ForeignKey(User.id), nullable=False)
    # TODO: No of items in Collection. Badly named, can we migrate to num_items?
    items = Column(Integer, default=0)

    # Cascade: Collections are owned by their creator. So do the full thing.
    get_creator = relationship(User,
                               backref=backref("collections",
                                               cascade="all, delete-orphan"))

    __table_args__ = (
        UniqueConstraint('creator', 'slug'),
        {})

    def get_collection_items(self, ascending=False):
        """Return this collection's items ordered by position.

        :param ascending: lowest position first when True
        """
        #TODO, is this still needed with self.collection_items being available?
        order_col = CollectionItem.position
        if not ascending:
            order_col = desc(order_col)
        return CollectionItem.query.filter_by(
            collection=self.id).order_by(order_col)
class CollectionItem(Base, CollectionItemMixin):
    """Membership of one MediaEntry in one Collection."""
    __tablename__ = "core__collection_items"

    id = Column(Integer, primary_key=True)
    media_entry = Column(
        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
    note = Column(UnicodeText, nullable=True)
    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
    position = Column(Integer)

    # Cascade: CollectionItems are owned by their Collection. So do the full thing.
    # NOTE(review): backref name reconstructed from the TODO in
    # Collection.get_collection_items referring to self.collection_items.
    in_collection = relationship(Collection,
                                 backref=backref(
                                     "collection_items",
                                     cascade="all, delete-orphan"))

    get_media_entry = relationship(MediaEntry)

    __table_args__ = (
        UniqueConstraint('collection', 'media_entry'),
        {})

    @property
    def dict_view(self):
        """A dict like view on this object"""
        return DictReadAttrProxy(self)
class ProcessingMetaData(Base):
    """Extra data attached to a MediaEntry during processing."""
    __tablename__ = 'core__processing_metadata'

    id = Column(Integer, primary_key=True)
    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
                            index=True)
    media_entry = relationship(MediaEntry,
            backref=backref('processing_metadata',
                cascade='all, delete-orphan'))
    callback_url = Column(Unicode)

    @property
    def dict_view(self):
        """A dict like view on this object"""
        return DictReadAttrProxy(self)
class CommentSubscription(Base):
    """A user's subscription to comments on one MediaEntry."""
    __tablename__ = 'core__comment_subscriptions'
    id = Column(Integer, primary_key=True)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
    media_entry = relationship(MediaEntry,
                               backref=backref('comment_subscriptions',
                                               cascade='all, delete-orphan'))

    user_id = Column(Integer, ForeignKey(User.id), nullable=False)
    user = relationship(User,
                        backref=backref('comment_subscriptions',
                                        cascade='all, delete-orphan'))

    # notify: in-app notification; send_email: also email the user
    notify = Column(Boolean, nullable=False, default=True)
    send_email = Column(Boolean, nullable=False, default=True)

    def __repr__(self):
        return ('<{classname} #{id}: {user} {media} notify: '
                '{notify} email: {email}>').format(
            id=self.id,
            classname=self.__class__.__name__,
            user=self.user,
            media=self.media_entry,
            notify=self.notify,
            email=self.send_email)
class Notification(Base):
    """Polymorphic base row for user notifications."""
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    # Polymorphic discriminator (see __mapper_args__ below)
    type = Column(Unicode)

    created = Column(DateTime, nullable=False, default=datetime.datetime.now)

    user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
                     index=True)
    seen = Column(Boolean, default=lambda: False, index=True)
    user = relationship(
        User,
        backref=backref('notifications', cascade='all, delete-orphan'))

    __mapper_args__ = {
        'polymorphic_identity': 'notification',
        'polymorphic_on': type
    }

    def __repr__(self):
        return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
            id=self.id,
            klass=self.__class__.__name__,
            user=self.user,
            subject=getattr(self, 'subject', None),
            seen='unseen' if not self.seen else 'seen')
class CommentNotification(Notification):
    """Notification subtype whose subject is a MediaComment."""
    __tablename__ = 'core__comment_notifications'
    id = Column(Integer, ForeignKey(Notification.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaComment.id))
    subject = relationship(
        MediaComment,
        backref=backref('comment_notifications', cascade='all, delete-orphan'))

    __mapper_args__ = {
        'polymorphic_identity': 'comment_notification'
    }
class ProcessingNotification(Notification):
    """Notification subtype whose subject is a MediaEntry being processed."""
    __tablename__ = 'core__processing_notifications'

    id = Column(Integer, ForeignKey(Notification.id), primary_key=True)

    subject_id = Column(Integer, ForeignKey(MediaEntry.id))
    subject = relationship(
        MediaEntry,
        backref=backref('processing_notifications',
                        cascade='all, delete-orphan'))

    __mapper_args__ = {
        'polymorphic_identity': 'processing_notification'
    }
# Pre-configure eager polymorphic loading of Notification subtypes.
with_polymorphic(
    Notification,
    [ProcessingNotification, CommentNotification])

# All models that migrations/table-init should know about.
# NOTE(review): the list-head line was lost in extraction; the name MODELS
# is reconstructed — confirm against upstream history.
MODELS = [
    User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
    MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
    MediaAttachmentFile, ProcessingMetaData, Notification, CommentNotification,
    ProcessingNotification, CommentSubscription]
"""
Foundations are the default rows that are created immediately after the tables
are initialized. Each entry to this dictionary should be in the format of:
    ModelConstructorObject: List of Dictionaries
(Each Dictionary represents a row on the Table to be created, containing each
of the columns' names as a key string, and each of the columns' values as a
value)

ex. [NOTE THIS IS NOT BASED OFF OF OUR USER TABLE]
    user_foundations = [{'name': u'Joanna', 'age': 24},
                        {'name': u'Andrea', 'age': 41}]

    FOUNDATIONS = {User: user_foundations}
"""
703 ######################################################
704 # Special, migrations-tracking table
706 # Not listed in MODELS because this is special and not
707 # really migrated, but used for migrations (for now)
708 ######################################################
class MigrationData(Base):
    """Tracks the applied migration version per migration set name."""
    __tablename__ = "core__migrations"

    name = Column(Unicode, primary_key=True)
    version = Column(Integer, nullable=False, default=0)
716 ######################################################
def show_table_init(engine_uri):
    """Create all tables on `engine_uri` (defaults to in-memory sqlite),
    echoing the generated SQL for inspection."""
    if engine_uri is None:
        engine_uri = 'sqlite:///:memory:'
    from sqlalchemy import create_engine
    engine = create_engine(engine_uri, echo=True)

    Base.metadata.create_all(engine)
728 if __name__
== '__main__':