Restructure ForgotPassword view
[mediagoblin.git] / mediagoblin / db / models.py
index c7506dbba371274d53dff3c4814409937edc19da..782bf8692d37ce916e407e877427bae2242b588d 100644 (file)
@@ -1,5 +1,5 @@
 # GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 Free Software Foundation, Inc
+# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import datetime, uuid
-
-from mongokit import Document, Set
-
-from mediagoblin import util
-from mediagoblin.auth import lib as auth_lib
-from mediagoblin import mg_globals
-from mediagoblin.db import migrations
-from mediagoblin.db.util import ASCENDING, DESCENDING, ObjectId
-
-###################
-# Custom validators
-###################
-
-########
-# Models
-########
-
-
-class User(Document):
-    __collection__ = 'users'
-
-    structure = {
-        'username': unicode,
-        'email': unicode,
-        'created': datetime.datetime,
-        'plugin_data': dict, # plugins can dump stuff here.
-        'pw_hash': unicode,
-        'email_verified': bool,
-        'status': unicode,
-        'verification_key': unicode,
-        'is_admin': bool,
-        'url' : unicode,
-        'bio' : unicode
-        }
-
-    required_fields = ['username', 'created', 'pw_hash', 'email']
-
-    default_values = {
-        'created': datetime.datetime.utcnow,
-        'email_verified': False,
-        'status': u'needs_email_verification',
-        'verification_key': lambda: unicode(uuid.uuid4()),
-        'is_admin': False}
-        
-    migration_handler = migrations.UserMigration
-
-    def check_login(self, password):
-        """
-        See if a user can login with this password
-        """
-        return auth_lib.bcrypt_check_password(
-            password, self['pw_hash'])
+"""
+TODO: indexes on foreignkeys, where useful.
+"""
+
+import logging
+import datetime
+import sys
+
+from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
+        Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
+        SmallInteger
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.sql.expression import desc
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.util import memoized_property
+
+from mediagoblin.db.extratypes import PathTupleWithSlashes, JSONEncoded
+from mediagoblin.db.base import Base, DictReadAttrProxy, Session
+from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
+        MediaCommentMixin, CollectionMixin, CollectionItemMixin
+from mediagoblin.tools.files import delete_media_files
+
+# It's actually kind of annoying how sqlalchemy-migrate does this, if
+# I understand it right, but whatever.  Anyway, don't remove this :P
+#
+# We could do migration calls more manually instead of relying on
+# this import-based meddling...
+from migrate import changeset
 
+_log = logging.getLogger(__name__)
 
-class MediaEntry(Document):
-    __collection__ = 'media_entries'
 
-    structure = {
-        'uploader': ObjectId,
-        'title': unicode,
-        'slug': unicode,
-        'created': datetime.datetime,
-        'description': unicode, # May contain markdown/up
-        'description_html': unicode, # May contain plaintext, or HTML
-        'media_type': unicode,
-        'media_data': dict, # extra data relevant to this media_type
-        'plugin_data': dict, # plugins can dump stuff here.
-        'tags': [unicode],
-        'state': unicode,
+class User(Base, UserMixin):
+    """
+    TODO: We should consider moving some rarely used fields
+    into some sort of "shadow" table.
+    """
+    __tablename__ = "core__users"
+
+    id = Column(Integer, primary_key=True)
+    username = Column(Unicode, nullable=False, unique=True)
+    email = Column(Unicode, nullable=False)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    pw_hash = Column(Unicode, nullable=False)
+    email_verified = Column(Boolean, default=False)
+    status = Column(Unicode, default=u"needs_email_verification", nullable=False)
+    # Intended to be nullable=False, but migrations would not work for it,
+    # so it is set to nullable=True implicitly.
+    wants_comment_notification = Column(Boolean, default=True)
+    verification_key = Column(Unicode)
+    is_admin = Column(Boolean, default=False, nullable=False)
+    url = Column(Unicode)
+    bio = Column(UnicodeText)  # ??
+    fp_verification_key = Column(Unicode)
+    fp_token_expire = Column(DateTime)
+
+    ## TODO
+    # plugin data would be in a separate model
+
+    def __repr__(self):
+        return '<{0} #{1} {2} {3} "{4}">'.format(
+                self.__class__.__name__,
+                self.id,
+                'verified' if self.email_verified else 'non-verified',
+                'admin' if self.is_admin else 'user',
+                self.username)
+
+    def delete(self, **kwargs):
+        """Deletes a User and all related entries/comments/files/..."""
+        # Delete this user's Collections and all contained CollectionItems
+        for collection in self.collections:
+            collection.delete(commit=False)
+
+        media_entries = MediaEntry.query.filter(MediaEntry.uploader == self.id)
+        for media in media_entries:
+            # TODO: Make sure that "MediaEntry.delete()" also deletes
+            # all related files/Comments
+            media.delete(del_orphan_tags=False, commit=False)
+
+        # Delete now unused tags
+        # TODO: import here due to cyclic imports!!! This cries for refactoring
+        from mediagoblin.db.util import clean_orphan_tags
+        clean_orphan_tags(commit=False)
+
+        # Delete user, pass through commit=False/True in kwargs
+        super(User, self).delete(**kwargs)
+        _log.info('Deleted user "{0}" account'.format(self.username))
+
+
+class MediaEntry(Base, MediaEntryMixin):
+    """
+    TODO: Consider fetching the media_files using join
+    """
+    __tablename__ = "core__media_entries"
+
+    id = Column(Integer, primary_key=True)
+    uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
+    title = Column(Unicode, nullable=False)
+    slug = Column(Unicode)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+        index=True)
+    description = Column(UnicodeText) # ??
+    media_type = Column(Unicode, nullable=False)
+    state = Column(Unicode, default=u'unprocessed', nullable=False)
+        # or use sqlalchemy.types.Enum?
+    license = Column(Unicode)
+    collected = Column(Integer, default=0)
+
+    fail_error = Column(Unicode)
+    fail_metadata = Column(JSONEncoded)
+
+    transcoding_progress = Column(SmallInteger)
+
+    queued_media_file = Column(PathTupleWithSlashes)
+
+    queued_task_id = Column(Unicode)
+
+    __table_args__ = (
+        UniqueConstraint('uploader', 'slug'),
+        {})
+
+    get_uploader = relationship(User)
+
+    media_files_helper = relationship("MediaFile",
+        collection_class=attribute_mapped_collection("name"),
+        cascade="all, delete-orphan"
+        )
+    media_files = association_proxy('media_files_helper', 'file_path',
+        creator=lambda k, v: MediaFile(name=k, file_path=v)
+        )
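+    # Note: ``media_files`` above behaves like a dict keyed on the file
+    # keyname; assigning a path tuple creates the underlying MediaFile row.
+    # Sketch (keyname and path are hypothetical):
+    #     entry.media_files[u'thumb'] = (u'media_entries', u'42', u'thumb.jpg')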
+
+    attachment_files_helper = relationship("MediaAttachmentFile",
+        order_by="MediaAttachmentFile.created"
+        )
+    attachment_files = association_proxy("attachment_files_helper", "dict_view",
+        creator=lambda v: MediaAttachmentFile(
+            name=v["name"], filepath=v["filepath"])
+        )
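+    # Note: ``attachment_files`` above is list-like; appending a dict with
+    # "name" and "filepath" keys creates a MediaAttachmentFile row, e.g.
+    # (values hypothetical):
+    #     entry.attachment_files.append(
+    #         {'name': u'notes.txt', 'filepath': (u'attachments', u'notes.txt')})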
+
+    tags_helper = relationship("MediaTag",
+        cascade="all, delete-orphan" # should be automatically deleted
+        )
+    tags = association_proxy("tags_helper", "dict_view",
+        creator=lambda v: MediaTag(name=v["name"], slug=v["slug"])
+        )
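+    # Note: ``tags`` above works the same way; appending
+    # {'name': ..., 'slug': ...} adds a MediaTag row for this entry.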
+
+    collections_helper = relationship("CollectionItem",
+        cascade="all, delete-orphan"
+        )
+    collections = association_proxy("collections_helper", "in_collection")
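+    # ``collections`` lists the Collection objects this entry appears in,
+    # resolved through the CollectionItem rows in ``collections_helper``.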
+
+    ## TODO
+    # media_data
+    # fail_error
+
+    def get_comments(self, ascending=False):
+        order_col = MediaComment.created
+        if not ascending:
+            order_col = desc(order_col)
+        return MediaComment.query.filter_by(
+            media_entry=self.id).order_by(order_col)
 
-        # For now let's assume there can only be one main file queued
-        # at a time
-        'queued_media_file': [unicode],
+    def url_to_prev(self, urlgen):
+        """get the next 'newer' entry by this user"""
+        media = MediaEntry.query.filter(
+            (MediaEntry.uploader == self.uploader)
+            & (MediaEntry.state == u'processed')
+            & (MediaEntry.id > self.id)).order_by(MediaEntry.id).first()
 
-        # A dictionary of logical names to filepaths
-        'media_files': dict,
+        if media is not None:
+            return media.url_for_self(urlgen)
 
-        # The following should be lists of lists, in appropriate file
-        # record form
-        'attachment_files': list,
+    def url_to_next(self, urlgen):
+        """get the next 'older' entry by this user"""
+        media = MediaEntry.query.filter(
+            (MediaEntry.uploader == self.uploader)
+            & (MediaEntry.state == u'processed')
+            & (MediaEntry.id < self.id)).order_by(desc(MediaEntry.id)).first()
 
-        # This one should just be a single file record
-        'thumbnail_file': [unicode]}
+        if media is not None:
+            return media.url_for_self(urlgen)
 
-    required_fields = [
-        'uploader', 'created', 'media_type', 'slug']
+    #@memoized_property
+    @property
+    def media_data(self):
+        session = Session()
 
-    default_values = {
-        'created': datetime.datetime.utcnow,
-        'state': u'unprocessed'}
+        return session.query(self.media_data_table).filter_by(
+            media_entry=self.id).first()
 
-    migration_handler = migrations.MediaEntryMigration
+    def media_data_init(self, **kwargs):
+        """
+        Initialize or update the contents of a media entry's media_data row
+        """
+        session = Session()
+
+        media_data = session.query(self.media_data_table).filter_by(
+            media_entry=self.id).first()
+
+        # No media data, so actually add a new one
+        if media_data is None:
+            media_data = self.media_data_table(
+                media_entry=self.id,
+                **kwargs)
+            session.add(media_data)
+        # Update old media data
+        else:
+            for field, value in kwargs.iteritems():
+                setattr(media_data, field, value)
+
+    @memoized_property
+    def media_data_table(self):
+        # (already memoized via the @memoized_property decorator)
+        models_module = self.media_type + '.models'
+        __import__(models_module)
+        return sys.modules[models_module].DATA_MODEL
+
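+    # How the media_data pieces above fit together (the media-type module
+    # name below is an assumption, not defined in this file):
+    #  * media_data_table imports '<media_type>.models' and returns its
+    #    DATA_MODEL class, e.g. u'mediagoblin.media_types.image' would map
+    #    to mediagoblin.media_types.image.models.DATA_MODEL.
+    #  * media_data queries that table for the row belonging to this entry.
+    #  * media_data_init(**kwargs) inserts or updates that row; it does not
+    #    commit, so the caller controls the transaction.
+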
+    def __repr__(self):
+        safe_title = self.title.encode('ascii', 'replace')
+
+        return '<{classname} {id}: {title}>'.format(
+                classname=self.__class__.__name__,
+                id=self.id,
+                title=safe_title)
+
+    def delete(self, del_orphan_tags=True, **kwargs):
+        """Delete MediaEntry and all related files/attachments/comments
+
+        This will *not* automatically delete unused collections, which
+        can remain empty...
+
+        :param del_orphan_tags: True/false if we delete unused Tags too
+        :param commit: True/False if this should end the db transaction"""
+        # User's CollectionItems are automatically deleted via "cascade".
+        # Delete all the associated comments
+        for comment in self.get_comments():
+            comment.delete(commit=False)
+
+        # Delete all related files/attachments
+        try:
+            delete_media_files(self)
+        except OSError, error:
+            # The OSError lists the files we failed to delete
+            _log.error('No such files from the user "{1}" to delete: '
+                       '{0}'.format(str(error), self.get_uploader))
+        _log.info('Deleted Media entry id "{0}"'.format(self.id))
+        # Related MediaTag's are automatically cleaned, but we might
+        # want to clean out unused Tag's too.
+        if del_orphan_tags:
+            # TODO: Import here due to cyclic imports!!!
+            #       This cries for refactoring
+            from mediagoblin.db.util import clean_orphan_tags
+            clean_orphan_tags(commit=False)
+        # pass through commit=False/True in kwargs
+        super(MediaEntry, self).delete(**kwargs)
+
+
+class FileKeynames(Base):
+    """
+    Key names used in various places;
+    currently the MediaFile keys.
+    """
+    __tablename__ = "core__file_keynames"
+    id = Column(Integer, primary_key=True)
+    name = Column(Unicode, unique=True)
 
-    def get_comments(self):
-        return self.db.MediaComment.find({
-                'media_entry': self['_id']}).sort('created', DESCENDING)
+    def __repr__(self):
+        return "<FileKeyname %r: %r>" % (self.id, self.name)
 
-    def main_mediafile(self):
-        pass
+    @classmethod
+    def find_or_new(cls, name):
+        t = cls.query.filter_by(name=name).first()
+        if t is not None:
+            return t
+        return cls(name=name)
 
-    def generate_slug(self):
-        self['slug'] = util.slugify(self['title'])
 
-        duplicate = mg_globals.database.media_entries.find_one(
-            {'slug': self['slug']})
-        
-        if duplicate:
-            self['slug'] = "%s-%s" % (self['_id'], self['slug'])
+class MediaFile(Base):
+    """
+    TODO: Highly consider moving "name" into a new table.
+    TODO: Consider preloading said table in software
+    """
+    __tablename__ = "core__mediafiles"
 
-    def url_for_self(self, urlgen):
-        """
-        Generate an appropriate url for ourselves
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False)
+    name_id = Column(SmallInteger, ForeignKey(FileKeynames.id), nullable=False)
+    file_path = Column(PathTupleWithSlashes)
 
-        Use a slug if we have one, else use our '_id'.
-        """
-        uploader = self.uploader()
+    __table_args__ = (
+        PrimaryKeyConstraint('media_entry', 'name_id'),
+        {})
 
-        if self.get('slug'):
-            return urlgen(
-                'mediagoblin.user_pages.media_home',
-                user=uploader['username'],
-                media=self['slug'])
-        else:
-            return urlgen(
-                'mediagoblin.user_pages.media_home',
-                user=uploader['username'],
-                media=unicode(self['_id']))
-            
-    def url_to_prev(self, urlgen):
-        """
-        Provide a url to the previous entry from this user, if there is one
-        """
-        cursor = self.db.MediaEntry.find({'_id' : {"$lt": self['_id']}, 
-                                          'uploader': self['uploader']}).sort(
-                                                    '_id', DESCENDING).limit(1)
-                                                    
-        if cursor.count():
-            return urlgen('mediagoblin.user_pages.media_home',
-                          user=self.uploader()['username'],
-                          media=unicode(cursor[0]['slug']))
-        
-    def url_to_next(self, urlgen):
-        """
-        Provide a url to the next entry from this user, if there is one
-        """
-        cursor = self.db.MediaEntry.find({'_id' : {"$gt": self['_id']}, 
-                                          'uploader': self['uploader']}).sort(
-                                                    '_id', ASCENDING).limit(1)
+    def __repr__(self):
+        return "<MediaFile %s: %r>" % (self.name, self.file_path)
 
-        if cursor.count():
-            return urlgen('mediagoblin.user_pages.media_home',
-                          user=self.uploader()['username'],
-                          media=unicode(cursor[0]['slug']))
+    name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True)
+    name = association_proxy('name_helper', 'name',
+        creator=FileKeynames.find_or_new
+        )
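+    # ``name`` reads and writes the keyname string directly; assigning an
+    # unknown name creates the FileKeynames row via find_or_new.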
 
-    def uploader(self):
-        return self.db.User.find_one({'_id': self['uploader']})
 
-class MediaComment(Document):
-    __collection__ = 'media_comments'
+class MediaAttachmentFile(Base):
+    __tablename__ = "core__attachment_files"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False)
+    name = Column(Unicode, nullable=False)
+    filepath = Column(PathTupleWithSlashes)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
-    structure = {
-        'media_entry': ObjectId,
-        'author': ObjectId,
-        'created': datetime.datetime,
-        'content': unicode,
-        'content_html': unicode}
+    @property
+    def dict_view(self):
+        """A dict like view on this object"""
+        return DictReadAttrProxy(self)
+
+
+class Tag(Base):
+    __tablename__ = "core__tags"
+
+    id = Column(Integer, primary_key=True)
+    slug = Column(Unicode, nullable=False, unique=True)
+
+    def __repr__(self):
+        return "<Tag %r: %r>" % (self.id, self.slug)
+
+    @classmethod
+    def find_or_new(cls, slug):
+        t = cls.query.filter_by(slug=slug).first()
+        if t is not None:
+            return t
+        return cls(slug=slug)
+
+
+class MediaTag(Base):
+    __tablename__ = "core__media_tags"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False, index=True)
+    tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
+    name = Column(Unicode)
+    # created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+    __table_args__ = (
+        UniqueConstraint('tag', 'media_entry'),
+        {})
+
+    tag_helper = relationship(Tag)
+    slug = association_proxy('tag_helper', 'slug',
+        creator=Tag.find_or_new
+        )
+
+    def __init__(self, name=None, slug=None):
+        Base.__init__(self)
+        if name is not None:
+            self.name = name
+        if slug is not None:
+            self.tag_helper = Tag.find_or_new(slug)
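+        # (slug is stored on a shared Tag row via find_or_new, so an
+        # existing Tag with that slug is reused rather than duplicated)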
+
+    @property
+    def dict_view(self):
+        """A dict like view on this object"""
+        return DictReadAttrProxy(self)
+
+
+class MediaComment(Base, MediaCommentMixin):
+    __tablename__ = "core__media_comments"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+    author = Column(Integer, ForeignKey(User.id), nullable=False)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    content = Column(UnicodeText, nullable=False)
+
+    get_author = relationship(User)
+
+
+class Collection(Base, CollectionMixin):
+    """An 'album' or 'set' of media by a user.
+
+    On deletion, contained CollectionItems get automatically reaped via
+    SQL cascade"""
+    __tablename__ = "core__collections"
 
-    required_fields = [
-        'media_entry', 'author', 'created', 'content']
+    id = Column(Integer, primary_key=True)
+    title = Column(Unicode, nullable=False)
+    slug = Column(Unicode)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+        index=True)
+    description = Column(UnicodeText)
+    creator = Column(Integer, ForeignKey(User.id), nullable=False)
+    # TODO: No of items in Collection. Badly named, can we migrate to num_items?
+    items = Column(Integer, default=0)
 
-    default_values = {
-        'created': datetime.datetime.utcnow}
+    get_creator = relationship(User, backref="collections")
 
-    def media_entry(self):
-        return self.db.MediaEntry.find_one({'_id': self['media_entry']})
+    def get_collection_items(self, ascending=False):
+        # TODO: is this still needed with self.collection_items being available?
+        order_col = CollectionItem.position
+        if not ascending:
+            order_col = desc(order_col)
+        return CollectionItem.query.filter_by(
+            collection=self.id).order_by(order_col)
+
+
+class CollectionItem(Base, CollectionItemMixin):
+    __tablename__ = "core__collection_items"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+    collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
+    note = Column(UnicodeText, nullable=True)
+    added = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    position = Column(Integer)
+    in_collection = relationship("Collection",
+                                 backref=backref(
+                                     "collection_items",
+                                     cascade="all, delete-orphan"))
+
+    get_media_entry = relationship(MediaEntry)
+
+    __table_args__ = (
+        UniqueConstraint('collection', 'media_entry'),
+        {})
+
+    @property
+    def dict_view(self):
+        """A dict like view on this object"""
+        return DictReadAttrProxy(self)
+
+
+class ProcessingMetaData(Base):
+    __tablename__ = 'core__processing_metadata'
+
+    id = Column(Integer, primary_key=True)
+    media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False,
+            index=True)
+    media_entry = relationship(MediaEntry,
+            backref=backref('processing_metadata',
+                cascade='all, delete-orphan'))
+    callback_url = Column(Unicode)
+
+    @property
+    def dict_view(self):
+        """A dict like view on this object"""
+        return DictReadAttrProxy(self)
+
+
+MODELS = [
+    User, MediaEntry, Tag, MediaTag, MediaComment, Collection,
+    CollectionItem, MediaFile, FileKeynames, MediaAttachmentFile,
+    ProcessingMetaData]
 
-    def author(self):
-        return self.db.User.find_one({'_id': self['author']})
 
-REGISTER_MODELS = [
-    MediaEntry,
-    User,
-    MediaComment]
+######################################################
+# Special, migrations-tracking table
+#
+# Not listed in MODELS because this is special and not
+# really migrated, but used for migrations (for now)
+######################################################
 
+class MigrationData(Base):
+    __tablename__ = "core__migrations"
+
+    name = Column(Unicode, primary_key=True)
+    version = Column(Integer, nullable=False, default=0)
+
+######################################################
+
+
+def show_table_init(engine_uri):
+    if engine_uri is None:
+        engine_uri = 'sqlite:///:memory:'
+    from sqlalchemy import create_engine
+    engine = create_engine(engine_uri, echo=True)
+
+    Base.metadata.create_all(engine)
 
-def register_models(connection):
-    """
-    Register all models in REGISTER_MODELS with this connection.
-    """
-    connection.register(REGISTER_MODELS)
 
+if __name__ == '__main__':
+    from sys import argv
+    print repr(argv)
+    if len(argv) == 2:
+        uri = argv[1]
+    else:
+        uri = None
+    show_table_init(uri)
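+
+# Running this module directly prints the generated schema.  The invocation
+# below is a sketch and assumes the mediagoblin package is importable:
+#     python -m mediagoblin.db.models sqlite:////tmp/mediagoblin_test.db
+# With no argument an in-memory sqlite engine is used; because create_engine
+# is called with echo=True, the CREATE TABLE statements are echoed to output.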