From: Sebastian Spaeth Date: Thu, 29 Nov 2012 16:23:28 +0000 (+0100) Subject: RIP out mongo X-Git-Url: https://vcs.fsf.org/?a=commitdiff_plain;h=bc142abc5592975c08319416d0af12c108504887;p=mediagoblin.git RIP out mongo Since sqlalchemy is providing our database abstraction and we have moved away from Mongo as the underlying database, it is now time to simplify things and rip out mongo. This provides the bulk of the changes, and can stand on its own. There are some followup tasks that can be done, such as removing now unneeded abstraction layers, e.g. db.sql.fake.py --- diff --git a/mediagoblin/app.py b/mediagoblin/app.py index 936a354f..c1636693 100644 --- a/mediagoblin/app.py +++ b/mediagoblin/app.py @@ -78,7 +78,7 @@ class MediaGoblinApp(object): setup_plugins() # Set up the database - self.connection, self.db = setup_database() + self.db = setup_database() # Register themes self.theme_registry, self.current_theme = register_themes(app_config) diff --git a/mediagoblin/db/__init__.py b/mediagoblin/db/__init__.py index d149f62a..27ca4b06 100644 --- a/mediagoblin/db/__init__.py +++ b/mediagoblin/db/__init__.py @@ -18,18 +18,6 @@ Database Abstraction/Wrapper Layer ================================== - **NOTE from Chris Webber:** I asked Elrond to explain why he put - ASCENDING and DESCENDING in db/util.py when we could just import from - pymongo. Read beow for why, but note that nobody is actually doing - this and there's no proof that we'll ever support more than - MongoDB... it would be a huge amount of work to do so. - - If you really want to prove that possible, jump on IRC and talk to - us about making such a branch. In the meanwhile, it doesn't hurt to - have things as they are... if it ever makes it hard for us to - actually do things, we might revisit or remove this. But for more - information, read below. - This submodule is for most of the db specific stuff. There are two main ideas here: diff --git a/mediagoblin/db/mongo/__init__.py b/mediagoblin/db/mongo/__init__.py deleted file mode 100644 index 621845ba..00000000 --- a/mediagoblin/db/mongo/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . diff --git a/mediagoblin/db/mongo/indexes.py b/mediagoblin/db/mongo/indexes.py deleted file mode 100644 index a63c24ae..00000000 --- a/mediagoblin/db/mongo/indexes.py +++ /dev/null @@ -1,146 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -""" -Indexes for the local database. - -To add new indexes ------------------- - -Indexes are recorded in the following format: - -ACTIVE_INDEXES = { - 'collection_name': { - 'identifier': { # key identifier used for possibly deprecating later - 'index': [index_foo_goes_here]}} - -... and anything else being parameters to the create_index function -(including unique=True, etc) - -Current indexes must be registered in ACTIVE_INDEXES... deprecated -indexes should be marked in DEPRECATED_INDEXES. - -Remember, ordering of compound indexes MATTERS. Read below for more. - -REQUIRED READING: - - http://kylebanker.com/blog/2010/09/21/the-joy-of-mongodb-indexes/ - - - http://www.mongodb.org/display/DOCS/Indexes - - http://www.mongodb.org/display/DOCS/Indexing+Advice+and+FAQ - - -To remove deprecated indexes ----------------------------- - -Removing deprecated indexes is the same, just move the index into the -deprecated indexes mapping. - -DEPRECATED_INDEXES = { - 'collection_name': { - 'deprecated_index_identifier1': { - 'index': [index_foo_goes_here]}} - -... etc. - -If an index has been deprecated that identifier should NEVER BE USED -AGAIN. Eg, if you previously had 'awesomepants_unique', you shouldn't -use 'awesomepants_unique' again, you should create a totally new name -or at worst use 'awesomepants_unique2'. -""" - -from pymongo import ASCENDING, DESCENDING - - -################ -# Active indexes -################ -ACTIVE_INDEXES = {} - -# MediaEntry indexes -# ------------------ - -MEDIAENTRY_INDEXES = { - 'uploader_slug_unique': { - # Matching an object to an uploader + slug. - # MediaEntries are unique on these two combined, eg: - # /u/${myuser}/m/${myslugname}/ - 'index': [('uploader', ASCENDING), - ('slug', ASCENDING)], - 'unique': True}, - - 'created': { - # A global index for all media entries created, in descending - # order. This is used for the site's frontpage. - 'index': [('created', DESCENDING)]}, - - 'uploader_created': { - # Indexing on uploaders and when media entries are created. - # Used for showing a user gallery, etc. - 'index': [('uploader', ASCENDING), - ('created', DESCENDING)]}, - - 'state_uploader_tags_created': { - # Indexing on processed?, media uploader, associated tags, and - # timestamp Used for showing media items matching a tag - # search, most recent first. - 'index': [('state', ASCENDING), - ('uploader', ASCENDING), - ('tags.slug', DESCENDING), - ('created', DESCENDING)]}, - - 'state_tags_created': { - # Indexing on processed?, media tags, and timestamp (across all users) - # This is used for a front page tag search. - 'index': [('state', ASCENDING), - ('tags.slug', DESCENDING), - ('created', DESCENDING)]}} - - -ACTIVE_INDEXES['media_entries'] = MEDIAENTRY_INDEXES - - -# User indexes -# ------------ - -USER_INDEXES = { - 'username_unique': { - # Index usernames, and make sure they're unique. - # ... 
I guess we might need to adjust this once we're federated :) - 'index': 'username', - 'unique': True}, - 'created': { - # All most recently created users - 'index': 'created'}} - - -ACTIVE_INDEXES['users'] = USER_INDEXES - - -# MediaComment indexes - -MEDIA_COMMENT_INDEXES = { - 'mediaentry_created': { - 'index': [('media_entry', ASCENDING), - ('created', DESCENDING)]}} - -ACTIVE_INDEXES['media_comments'] = MEDIA_COMMENT_INDEXES - - -#################### -# Deprecated indexes -#################### - -DEPRECATED_INDEXES = {} diff --git a/mediagoblin/db/mongo/migrations.py b/mediagoblin/db/mongo/migrations.py deleted file mode 100644 index 569dec88..00000000 --- a/mediagoblin/db/mongo/migrations.py +++ /dev/null @@ -1,208 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -from mediagoblin.db.mongo.util import RegisterMigration -from mediagoblin.tools.text import cleaned_markdown_conversion - - -def add_table_field(db, table_name, field_name, default_value): - """ - Add a new field to the table/collection named table_name. - The field will have the name field_name and the value default_value - """ - db[table_name].update( - {field_name: {'$exists': False}}, - {'$set': {field_name: default_value}}, - multi=True) - - -def drop_table_field(db, table_name, field_name): - """ - Drop an old field from a table/collection - """ - db[table_name].update( - {field_name: {'$exists': True}}, - {'$unset': {field_name: 1}}, - multi=True) - - -# Please see mediagoblin/tests/test_migrations.py for some examples of -# basic migrations. - - -@RegisterMigration(1) -def user_add_bio_html(database): - """ - Users now have richtext bios via Markdown, reflect appropriately. - """ - collection = database['users'] - - target = collection.find( - {'bio_html': {'$exists': False}}) - - for document in target: - document['bio_html'] = cleaned_markdown_conversion( - document['bio']) - collection.save(document) - - -@RegisterMigration(2) -def mediaentry_mediafiles_main_to_original(database): - """ - Rename "main" media file to "original". - """ - collection = database['media_entries'] - target = collection.find( - {'media_files.main': {'$exists': True}}) - - for document in target: - original = document['media_files'].pop('main') - document['media_files']['original'] = original - - collection.save(document) - - -@RegisterMigration(3) -def mediaentry_remove_thumbnail_file(database): - """ - Use media_files['thumb'] instead of media_entries['thumbnail_file'] - """ - database['media_entries'].update( - {'thumbnail_file': {'$exists': True}}, - {'$unset': {'thumbnail_file': 1}}, - multi=True) - - -@RegisterMigration(4) -def mediaentry_add_queued_task_id(database): - """ - Add the 'queued_task_id' field for entries that don't have it. 
- """ - add_table_field(database, 'media_entries', 'queued_task_id', None) - - -@RegisterMigration(5) -def mediaentry_add_fail_error_and_metadata(database): - """ - Add 'fail_error' and 'fail_metadata' fields to media entries - """ - add_table_field(database, 'media_entries', 'fail_error', None) - add_table_field(database, 'media_entries', 'fail_metadata', {}) - - -@RegisterMigration(6) -def user_add_forgot_password_token_and_expires(database): - """ - Add token and expiration fields to help recover forgotten passwords - """ - add_table_field(database, 'users', 'fp_verification_key', None) - add_table_field(database, 'users', 'fp_token_expire', None) - - -@RegisterMigration(7) -def media_type_image_to_multimedia_type_image(database): - database['media_entries'].update( - {'media_type': 'image'}, - {'$set': {'media_type': 'mediagoblin.media_types.image'}}, - multi=True) - - -@RegisterMigration(8) -def mediaentry_add_license(database): - """ - Add the 'license' field for entries that don't have it. - """ - add_table_field(database, 'media_entries', 'license', None) - - -@RegisterMigration(9) -def remove_calculated_html(database): - """ - Drop pre-rendered html again and calculate things - on the fly (and cache): - - User.bio_html - - MediaEntry.description_html - - MediaComment.content_html - """ - drop_table_field(database, 'users', 'bio_html') - drop_table_field(database, 'media_entries', 'description_html') - drop_table_field(database, 'media_comments', 'content_html') - - -@RegisterMigration(10) -def convert_video_media_data(database): - """ - Move media_data["video"] directly into media_data - """ - collection = database['media_entries'] - target = collection.find( - {'media_data.video': {'$exists': True}}) - - for document in target: - assert len(document['media_data']) == 1 - document['media_data'] = document['media_data']['video'] - collection.save(document) - - -@RegisterMigration(11) -def convert_gps_media_data(database): - """ - Move media_data["gps"]["*"] to media_data["gps_*"]. - In preparation for media_data.gps_* - """ - collection = database['media_entries'] - target = collection.find( - {'media_data.gps': {'$exists': True}}) - - for document in target: - for key, value in document['media_data']['gps'].iteritems(): - document['media_data']['gps_' + key] = value - del document['media_data']['gps'] - collection.save(document) - - -@RegisterMigration(12) -def convert_exif_media_data(database): - """ - Move media_data["exif"]["clean"] to media_data["exif_all"]. - Drop media_data["exif"]["useful"] - In preparation for media_data.exif_all - """ - collection = database['media_entries'] - target = collection.find( - {'media_data.exif.clean': {'$exists': True}}) - - for document in target: - media_data = document['media_data'] - - exif_all = media_data['exif'].pop('clean') - if len(exif_all): - media_data['exif_all'] = exif_all - - del media_data['exif']['useful'] - - assert len(media_data['exif']) == 0 - del media_data['exif'] - - collection.save(document) - - -@RegisterMigration(13) -def user_add_wants_comment_notification(database): - """ - Add wants_comment_notification to user model - """ - add_table_field(database, 'users', 'wants_comment_notification', True) diff --git a/mediagoblin/db/mongo/models.py b/mediagoblin/db/mongo/models.py deleted file mode 100644 index 3f1363d5..00000000 --- a/mediagoblin/db/mongo/models.py +++ /dev/null @@ -1,310 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. 
-# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import datetime - -from mongokit import Document - -from mediagoblin.db.mongo import migrations -from mediagoblin.db.mongo.util import ASCENDING, DESCENDING, ObjectId -from mediagoblin.tools.pagination import Pagination -from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, MediaCommentMixin - - -class MongoPK(object): - """An alias for the _id primary key""" - def __get__(self, instance, cls): - return instance['_id'] - def __set__(self, instance, val): - instance['_id'] = val - def __delete__(self, instance): - del instance['_id'] - - -################### -# Custom validators -################### - -######## -# Models -######## - - -class User(Document, UserMixin): - """ - A user of MediaGoblin. - - Structure: - - username: The username of this user, should be unique to this instance. - - email: Email address of this user - - created: When the user was created - - plugin_data: a mapping of extra plugin information for this User. - Nothing uses this yet as we don't have plugins, but someday we - might... :) - - pw_hash: Hashed version of user's password. - - email_verified: Whether or not the user has verified their email or not. - Most parts of the site are disabled for users who haven't yet. - - status: whether or not the user is active, etc. Currently only has two - values, 'needs_email_verification' or 'active'. (In the future, maybe - we'll change this to a boolean with a key of 'active' and have a - separate field for a reason the user's been disabled if that's - appropriate... email_verified is already separate, after all.) - - wants_comment_notification: The user has selected that they want to be - notified when comments are posted on their media. - - verification_key: If the user is awaiting email verification, the user - will have to provide this key (which will be encoded in the presented - URL) in order to confirm their email as active. - - is_admin: Whether or not this user is an administrator or not. - - url: this user's personal webpage/website, if appropriate. - - bio: biography of this user (plaintext, in markdown) - """ - __collection__ = 'users' - use_dot_notation = True - - structure = { - 'username': unicode, - 'email': unicode, - 'created': datetime.datetime, - 'plugin_data': dict, # plugins can dump stuff here. 
- 'pw_hash': unicode, - 'email_verified': bool, - 'status': unicode, - 'wants_comment_notification': bool, - 'verification_key': unicode, - 'is_admin': bool, - 'url': unicode, - 'bio': unicode, # May contain markdown - 'fp_verification_key': unicode, # forgotten password verification key - 'fp_token_expire': datetime.datetime, - } - - required_fields = ['username', 'created', 'pw_hash', 'email'] - - default_values = { - 'created': datetime.datetime.utcnow, - 'email_verified': False, - 'wants_comment_notification': True, - 'status': u'needs_email_verification', - 'is_admin': False} - - id = MongoPK() - - -class MediaEntry(Document, MediaEntryMixin): - """ - Record of a piece of media. - - Structure: - - uploader: A reference to a User who uploaded this. - - - title: Title of this work - - - slug: A normalized "slug" which can be used as part of a URL to retrieve - this work, such as 'my-works-name-in-slug-form' may be viewable by - 'http://mg.example.org/u/username/m/my-works-name-in-slug-form/' - Note that since URLs are constructed this way, slugs must be unique - per-uploader. (An index is provided to enforce that but code should be - written on the python side to ensure this as well.) - - - created: Date and time of when this piece of work was uploaded. - - - description: Uploader-set description of this work. This can be marked - up with MarkDown for slight fanciness (links, boldness, italics, - paragraphs...) - - - media_type: What type of media is this? Currently we only support - 'image' ;) - - - media_data: Extra information that's media-format-dependent. - For example, images might contain some EXIF data that's not appropriate - to other formats. You might store it like: - - mediaentry.media_data['exif'] = { - 'manufacturer': 'CASIO', - 'model': 'QV-4000', - 'exposure_time': .659} - - Alternately for video you might store: - - # play length in seconds - mediaentry.media_data['play_length'] = 340 - - ... so what's appropriate here really depends on the media type. - - - plugin_data: a mapping of extra plugin information for this User. - Nothing uses this yet as we don't have plugins, but someday we - might... :) - - - tags: A list of tags. Each tag is stored as a dictionary that has a key - for the actual name and the normalized name-as-slug, so ultimately this - looks like: - [{'name': 'Gully Gardens', - 'slug': 'gully-gardens'}, - {'name': 'Castle Adventure Time?!", - 'slug': 'castle-adventure-time'}] - - - state: What's the state of this file? Active, inactive, disabled, etc... - But really for now there are only two states: - "unprocessed": uploaded but needs to go through processing for display - "processed": processed and able to be displayed - - - license: URI for media's license. - - - queued_media_file: storage interface style filepath describing a file - queued for processing. This is stored in the mg_globals.queue_store - storage system. - - - queued_task_id: celery task id. Use this to fetch the task state. - - - media_files: Files relevant to this that have actually been processed - and are available for various types of display. Stored like: - {'thumb': ['dir1', 'dir2', 'pic.png'} - - - attachment_files: A list of "attachment" files, ones that aren't - critical to this piece of media but may be usefully relevant to people - viewing the work. (currently unused.) 
- - - fail_error: path to the exception raised - - fail_metadata: - """ - __collection__ = 'media_entries' - use_dot_notation = True - - structure = { - 'uploader': ObjectId, - 'title': unicode, - 'slug': unicode, - 'created': datetime.datetime, - 'description': unicode, # May contain markdown/up - 'media_type': unicode, - 'media_data': dict, # extra data relevant to this media_type - 'plugin_data': dict, # plugins can dump stuff here. - 'tags': [dict], - 'state': unicode, - 'license': unicode, - - # For now let's assume there can only be one main file queued - # at a time - 'queued_media_file': [unicode], - 'queued_task_id': unicode, - - # A dictionary of logical names to filepaths - 'media_files': dict, - - # The following should be lists of lists, in appropriate file - # record form - 'attachment_files': list, - - # If things go badly in processing things, we'll store that - # data here - 'fail_error': unicode, - 'fail_metadata': dict} - - required_fields = [ - 'uploader', 'created', 'media_type', 'slug'] - - default_values = { - 'created': datetime.datetime.utcnow, - 'state': u'unprocessed'} - - id = MongoPK() - - def media_data_init(self, **kwargs): - self.media_data.update(kwargs) - - def get_comments(self, ascending=False): - if ascending: - order = ASCENDING - else: - order = DESCENDING - - return self.db.MediaComment.find({ - 'media_entry': self._id}).sort('created', order) - - def url_to_prev(self, urlgen): - """ - Provide a url to the previous entry from this user, if there is one - """ - cursor = self.db.MediaEntry.find({'_id': {"$gt": self._id}, - 'uploader': self.uploader, - 'state': 'processed'}).sort( - '_id', ASCENDING).limit(1) - for media in cursor: - return media.url_for_self(urlgen) - - def url_to_next(self, urlgen): - """ - Provide a url to the next entry from this user, if there is one - """ - cursor = self.db.MediaEntry.find({'_id': {"$lt": self._id}, - 'uploader': self.uploader, - 'state': 'processed'}).sort( - '_id', DESCENDING).limit(1) - - for media in cursor: - return media.url_for_self(urlgen) - - @property - def get_uploader(self): - return self.db.User.find_one({'_id': self.uploader}) - - -class MediaComment(Document, MediaCommentMixin): - """ - A comment on a MediaEntry. - - Structure: - - media_entry: The media entry this comment is attached to - - author: user who posted this comment - - created: when the comment was created - - content: plaintext (but markdown'able) version of the comment's content. - """ - - __collection__ = 'media_comments' - use_dot_notation = True - - structure = { - 'media_entry': ObjectId, - 'author': ObjectId, - 'created': datetime.datetime, - 'content': unicode, - } - - required_fields = [ - 'media_entry', 'author', 'created', 'content'] - - default_values = { - 'created': datetime.datetime.utcnow} - - def media_entry(self): - return self.db.MediaEntry.find_one({'_id': self['media_entry']}) - - @property - def get_author(self): - return self.db.User.find_one({'_id': self['author']}) - - -REGISTER_MODELS = [ - MediaEntry, - User, - MediaComment] - - -def register_models(connection): - """ - Register all models in REGISTER_MODELS with this connection. - """ - connection.register(REGISTER_MODELS) diff --git a/mediagoblin/db/mongo/open.py b/mediagoblin/db/mongo/open.py deleted file mode 100644 index c4f37b42..00000000 --- a/mediagoblin/db/mongo/open.py +++ /dev/null @@ -1,82 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. 
-# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import pymongo -import mongokit -from paste.deploy.converters import asint -from mediagoblin.db.mongo import models -from mediagoblin.db.mongo.util import MigrationManager - - -def load_models(app_config): - pass - - -def connect_database_from_config(app_config, use_pymongo=False): - """ - Connect to the main database, take config from app_config - - Optionally use pymongo instead of mongokit for the connection. - """ - port = app_config.get('db_port') - if port: - port = asint(port) - - if use_pymongo: - connection = pymongo.Connection( - app_config.get('db_host'), port) - else: - connection = mongokit.Connection( - app_config.get('db_host'), port) - return connection - - -def setup_connection_and_db_from_config(app_config, use_pymongo=False): - """ - Setup connection and database from config. - - Optionally use pymongo instead of mongokit. - """ - connection = connect_database_from_config(app_config, use_pymongo) - database_path = app_config['db_name'] - db = connection[database_path] - - if not use_pymongo: - models.register_models(connection) - - return (connection, db) - - -def check_db_migrations_current(db): - # This MUST be imported so as to set up the appropriate migrations! - from mediagoblin.db.mongo import migrations - - # Init the migration number if necessary - migration_manager = MigrationManager(db) - migration_manager.install_migration_version_if_missing() - - # Tiny hack to warn user if our migration is out of date - if not migration_manager.database_at_latest_migration(): - db_migration_num = migration_manager.database_current_migration() - latest_migration_num = migration_manager.latest_migration() - if db_migration_num < latest_migration_num: - print ( - "*WARNING:* Your migrations are out of date, " - "maybe run ./bin/gmg migrate?") - elif db_migration_num > latest_migration_num: - print ( - "*WARNING:* Your migrations are out of date... " - "in fact they appear to be from the future?!") diff --git a/mediagoblin/db/mongo/util.py b/mediagoblin/db/mongo/util.py deleted file mode 100644 index f61ae6be..00000000 --- a/mediagoblin/db/mongo/util.py +++ /dev/null @@ -1,318 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
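The calling convention that the rest of this commit adjusts is visible in open.py above: the Mongo version of setup_connection_and_db_from_config() returned a (connection, db) pair, while the SQL version now returns only the database object. A minimal caller-side sketch of the difference, reusing the sqlite config that db/sql/util.py's __main__ block uses further down (illustrative only):

    from mediagoblin.db.sql.open import setup_connection_and_db_from_config

    app_config = {'sql_engine': 'sqlite:///mediagoblin.db'}

    # Mongo era (removed in this commit): a connection object came along too.
    #   connection, db = setup_connection_and_db_from_config(app_config)

    # SQL-only era: just the DatabaseMaster wrapper around the engine.
    db = setup_connection_and_db_from_config(app_config)
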
- -""" -Utilities for database operations. - -Some note on migration and indexing tools: - -We store information about what the state of the database is in the -'mediagoblin' document of the 'app_metadata' collection. Keys in that -document relevant to here: - - - 'migration_number': The integer representing the current state of - the migrations -""" - -import copy - -# Imports that other modules might use -from pymongo import ASCENDING, DESCENDING -from pymongo.errors import InvalidId -from mongokit import ObjectId - -from mediagoblin.db.mongo.indexes import ACTIVE_INDEXES, DEPRECATED_INDEXES - - -################ -# Indexing tools -################ - - -def add_new_indexes(database, active_indexes=ACTIVE_INDEXES): - """ - Add any new indexes to the database. - - Args: - - database: pymongo or mongokit database instance. - - active_indexes: indexes to possibly add in the pattern of: - {'collection_name': { - 'identifier': { - 'index': [index_foo_goes_here], - 'unique': True}} - where 'index' is the index to add and all other options are - arguments for collection.create_index. - - Returns: - A list of indexes added in form ('collection', 'index_name') - """ - indexes_added = [] - - for collection_name, indexes in active_indexes.iteritems(): - collection = database[collection_name] - collection_indexes = collection.index_information().keys() - - for index_name, index_data in indexes.iteritems(): - if not index_name in collection_indexes: - # Get a copy actually so we don't modify the actual - # structure - index_data = copy.copy(index_data) - index = index_data.pop('index') - collection.create_index( - index, name=index_name, **index_data) - - indexes_added.append((collection_name, index_name)) - - return indexes_added - - -def remove_deprecated_indexes(database, deprecated_indexes=DEPRECATED_INDEXES): - """ - Remove any deprecated indexes from the database. - - Args: - - database: pymongo or mongokit database instance. - - deprecated_indexes: the indexes to deprecate in the pattern of: - {'collection_name': { - 'identifier': { - 'index': [index_foo_goes_here], - 'unique': True}} - - (... although we really only need the 'identifier' here, as the - rest of the information isn't used in this case. But it's kept - around so we can remember what it was) - - Returns: - A list of indexes removed in form ('collection', 'index_name') - """ - indexes_removed = [] - - for collection_name, indexes in deprecated_indexes.iteritems(): - collection = database[collection_name] - collection_indexes = collection.index_information().keys() - - for index_name, index_data in indexes.iteritems(): - if index_name in collection_indexes: - collection.drop_index(index_name) - - indexes_removed.append((collection_name, index_name)) - - return indexes_removed - - -################# -# Migration tools -################# - -# The default migration registry... -# -# Don't set this yourself! RegisterMigration will automatically fill -# this with stuff via decorating methods in migrations.py - -class MissingCurrentMigration(Exception): - pass - - -MIGRATIONS = {} - - -class RegisterMigration(object): - """ - Tool for registering migrations - - Call like: - - @RegisterMigration(33) - def update_dwarves(database): - [...] - - This will register your migration with the default migration - registry. Alternately, to specify a very specific - migration_registry, you can pass in that as the second argument. - - Note, the number of your migration should NEVER be 0 or less than - 0. 0 is the default "no migrations" state! 
- """ - def __init__(self, migration_number, migration_registry=MIGRATIONS): - assert migration_number > 0, "Migration number must be > 0!" - assert migration_number not in migration_registry, \ - "Duplicate migration numbers detected! That's not allowed!" - - self.migration_number = migration_number - self.migration_registry = migration_registry - - def __call__(self, migration): - self.migration_registry[self.migration_number] = migration - return migration - - -class MigrationManager(object): - """ - Migration handling tool. - - Takes information about a database, lets you update the database - to the latest migrations, etc. - """ - def __init__(self, database, migration_registry=MIGRATIONS): - """ - Args: - - database: database we're going to migrate - - migration_registry: where we should find all migrations to - run - """ - self.database = database - self.migration_registry = migration_registry - self._sorted_migrations = None - - def _ensure_current_migration_record(self): - """ - If there isn't a database[u'app_metadata'] mediagoblin entry - with the 'current_migration', throw an error. - """ - if self.database_current_migration() is None: - raise MissingCurrentMigration( - "Tried to call function which requires " - "'current_migration' set in database") - - @property - def sorted_migrations(self): - """ - Sort migrations if necessary and store in self._sorted_migrations - """ - if not self._sorted_migrations: - self._sorted_migrations = sorted( - self.migration_registry.items(), - # sort on the key... the migration number - key=lambda migration_tuple: migration_tuple[0]) - - return self._sorted_migrations - - def latest_migration(self): - """ - Return a migration number for the latest migration, or 0 if - there are no migrations. - """ - if self.sorted_migrations: - return self.sorted_migrations[-1][0] - else: - # If no migrations have been set, we start at 0. - return 0 - - def set_current_migration(self, migration_number): - """ - Set the migration in the database to migration_number - """ - # Add the mediagoblin migration if necessary - self.database[u'app_metadata'].update( - {u'_id': u'mediagoblin'}, - {u'$set': {u'current_migration': migration_number}}, - upsert=True) - - def install_migration_version_if_missing(self): - """ - Sets the migration to the latest version if no migration - version at all is set. - """ - mgoblin_metadata = self.database[u'app_metadata'].find_one( - {u'_id': u'mediagoblin'}) - if not mgoblin_metadata: - latest_migration = self.latest_migration() - self.set_current_migration(latest_migration) - - def database_current_migration(self): - """ - Return the current migration in the database. - """ - mgoblin_metadata = self.database[u'app_metadata'].find_one( - {u'_id': u'mediagoblin'}) - if not mgoblin_metadata: - return None - else: - return mgoblin_metadata[u'current_migration'] - - def database_at_latest_migration(self): - """ - See if the database is at the latest migration. - Returns a boolean. - """ - current_migration = self.database_current_migration() - return current_migration == self.latest_migration() - - def migrations_to_run(self): - """ - Get a list of migrations to run still, if any. - - Note that calling this will set your migration version to the - latest version if it isn't installed to anything yet! 
- """ - self._ensure_current_migration_record() - - db_current_migration = self.database_current_migration() - - return [ - (migration_number, migration_func) - for migration_number, migration_func in self.sorted_migrations - if migration_number > db_current_migration] - - def migrate_new(self, pre_callback=None, post_callback=None): - """ - Run all migrations. - - Includes two optional args: - - pre_callback: if called, this is a callback on something to - run pre-migration. Takes (migration_number, migration_func) - as arguments - - pre_callback: if called, this is a callback on something to - run post-migration. Takes (migration_number, migration_func) - as arguments - """ - # If we aren't set to any version number, presume we're at the - # latest (which means we'll do nothing here...) - self.install_migration_version_if_missing() - - for migration_number, migration_func in self.migrations_to_run(): - if pre_callback: - pre_callback(migration_number, migration_func) - migration_func(self.database) - self.set_current_migration(migration_number) - if post_callback: - post_callback(migration_number, migration_func) - - -########################## -# Random utility functions -########################## - - -def atomic_update(table, query_dict, update_values): - table.collection.update( - query_dict, - {"$set": update_values}) - - -def check_media_slug_used(db, uploader_id, slug, ignore_m_id): - query_dict = {'uploader': uploader_id, 'slug': slug} - if ignore_m_id is not None: - query_dict['_id'] = {'$ne': ignore_m_id} - existing_user_slug_entries = db.MediaEntry.find( - query_dict).count() - return existing_user_slug_entries - - -def media_entries_for_tag_slug(db, tag_slug): - return db.MediaEntry.find( - {u'state': u'processed', - u'tags.slug': tag_slug}) diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py index f4c38511..e32d8853 100644 --- a/mediagoblin/db/open.py +++ b/mediagoblin/db/open.py @@ -14,16 +14,6 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -try: - from mediagoblin.db.sql_switch import use_sql -except ImportError: - use_sql = False - -if use_sql: - from mediagoblin.db.sql.open import \ - setup_connection_and_db_from_config, check_db_migrations_current, \ - load_models -else: - from mediagoblin.db.mongo.open import \ +from mediagoblin.db.sql.open import \ setup_connection_and_db_from_config, check_db_migrations_current, \ load_models diff --git a/mediagoblin/db/sql/convert.py b/mediagoblin/db/sql/convert.py deleted file mode 100644 index ac64cf8d..00000000 --- a/mediagoblin/db/sql/convert.py +++ /dev/null @@ -1,282 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
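Before the converter below, a quick illustration of how the migrate_new() driver above was actually exercised: the soon-to-be-deleted `gmg migrate` command built a MigrationManager and fed it print callbacks. A minimal sketch, assuming `db` is an already-open mongokit/pymongo database handle:

    from mediagoblin.db.mongo.util import MigrationManager

    def report_start(migration_number, migration_func):
        # pre_callback: announce which registered migration is about to run
        print "Running migration %s (%s)..." % (
            migration_number, migration_func.func_name)

    manager = MigrationManager(db)
    # Runs every migration newer than the stored 'current_migration' value,
    # bumping app_metadata after each one succeeds.
    manager.migrate_new(pre_callback=report_start)
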
- -from copy import copy -from itertools import chain, imap - -from mediagoblin.init import setup_global_and_app_config - -from mediagoblin.db.sql.base import Session -from mediagoblin.db.sql.models_v0 import Base_v0 -from mediagoblin.db.sql.models_v0 import (User, MediaEntry, MediaComment, - Tag, MediaTag, MediaFile, MediaAttachmentFile, MigrationData, - ImageData, VideoData, AsciiData, AudioData) -from mediagoblin.db.sql.open import setup_connection_and_db_from_config as \ - sql_connect -from mediagoblin.db.mongo.open import setup_connection_and_db_from_config as \ - mongo_connect - - -obj_id_table = dict() - - -def add_obj_ids(entry, new_entry): - global obj_id_table - print "\t%r -> SQL id %r" % (entry._id, new_entry.id) - obj_id_table[entry._id] = new_entry.id - - -def copy_attrs(entry, new_entry, attr_list): - for a in attr_list: - val = entry[a] - setattr(new_entry, a, val) - - -def copy_reference_attr(entry, new_entry, ref_attr): - val = entry[ref_attr] - val = obj_id_table[val] - setattr(new_entry, ref_attr, val) - - -def convert_users(mk_db): - session = Session() - - for entry in mk_db.User.find().sort('created'): - print entry.username - - new_entry = User() - copy_attrs(entry, new_entry, - ('username', 'email', 'created', 'pw_hash', 'email_verified', - 'status', 'verification_key', 'is_admin', 'url', - 'bio', - 'fp_verification_key', 'fp_token_expire',)) - # new_entry.fp_verification_expire = entry.fp_token_expire - - session.add(new_entry) - session.flush() - add_obj_ids(entry, new_entry) - - session.commit() - session.close() - - -def convert_media_entries(mk_db): - session = Session() - - for entry in mk_db.MediaEntry.find().sort('created'): - print repr(entry.title) - - new_entry = MediaEntry() - copy_attrs(entry, new_entry, - ('title', 'slug', 'created', - 'description', - 'media_type', 'state', 'license', - 'fail_error', 'fail_metadata', - 'queued_task_id',)) - copy_reference_attr(entry, new_entry, "uploader") - - session.add(new_entry) - session.flush() - add_obj_ids(entry, new_entry) - - for key, value in entry.media_files.iteritems(): - new_file = MediaFile(name=key, file_path=value) - new_file.media_entry = new_entry.id - Session.add(new_file) - - for attachment in entry.attachment_files: - new_attach = MediaAttachmentFile( - name=attachment["name"], - filepath=attachment["filepath"], - created=attachment["created"] - ) - new_attach.media_entry = new_entry.id - Session.add(new_attach) - - session.commit() - session.close() - - -def convert_image(mk_db): - session = Session() - - for media in mk_db.MediaEntry.find( - {'media_type': 'mediagoblin.media_types.image'}).sort('created'): - media_data = copy(media.media_data) - - if len(media_data): - media_data_row = ImageData(**media_data) - media_data_row.media_entry = obj_id_table[media['_id']] - session.add(media_data_row) - - session.commit() - session.close() - - -def convert_video(mk_db): - session = Session() - - for media in mk_db.MediaEntry.find( - {'media_type': 'mediagoblin.media_types.video'}).sort('created'): - media_data_row = VideoData(**media.media_data) - media_data_row.media_entry = obj_id_table[media['_id']] - session.add(media_data_row) - - session.commit() - session.close() - - -def convert_media_tags(mk_db): - session = Session() - session.autoflush = False - - for media in mk_db.MediaEntry.find().sort('created'): - print repr(media.title) - - for otag in media.tags: - print " ", repr((otag["slug"], otag["name"])) - - nslug = session.query(Tag).filter_by(slug=otag["slug"]).first() - print " ", 
repr(nslug) - if nslug is None: - nslug = Tag(slug=otag["slug"]) - session.add(nslug) - session.flush() - print " ", repr(nslug), nslug.id - - ntag = MediaTag() - ntag.tag = nslug.id - ntag.name = otag["name"] - ntag.media_entry = obj_id_table[media._id] - session.add(ntag) - - session.commit() - session.close() - - -def convert_media_comments(mk_db): - session = Session() - - for entry in mk_db.MediaComment.find().sort('created'): - print repr(entry.content) - - new_entry = MediaComment() - copy_attrs(entry, new_entry, - ('created', - 'content',)) - - try: - copy_reference_attr(entry, new_entry, "media_entry") - copy_reference_attr(entry, new_entry, "author") - except KeyError as e: - print('KeyError in convert_media_comments(): {0}'.format(e)) - else: - session.add(new_entry) - session.flush() - add_obj_ids(entry, new_entry) - - session.commit() - session.close() - - -media_types_tables = ( - ("mediagoblin.media_types.image", (ImageData,)), - ("mediagoblin.media_types.video", (VideoData,)), - ("mediagoblin.media_types.ascii", (AsciiData,)), - ("mediagoblin.media_types.audio", (AudioData,)), - ) - - -def convert_add_migration_versions(dummy_sql_db): - session = Session() - - for name in chain(("__main__",), - imap(lambda e: e[0], media_types_tables)): - print "\tAdding %s" % (name,) - m = MigrationData(name=unicode(name), version=0) - session.add(m) - - session.commit() - session.close() - - -def cleanup_sql_tables(sql_db): - for mt, table_list in media_types_tables: - session = Session() - - count = session.query(MediaEntry.media_type). \ - filter_by(media_type=unicode(mt)).count() - print " %s: %d entries" % (mt, count) - - if count == 0: - print "\tAnalyzing tables" - for tab in table_list: - cnt2 = session.query(tab).count() - print "\t %s: %d entries" % (tab.__tablename__, cnt2) - assert cnt2 == 0 - - print "\tRemoving migration info" - mi = session.query(MigrationData).filter_by(name=unicode(mt)).one() - session.delete(mi) - session.commit() - session.close() - - print "\tDropping tables" - tables = [model.__table__ for model in table_list] - Base_v0.metadata.drop_all(sql_db.engine, tables=tables) - - session.close() - - -def print_header(title): - print "\n=== %s ===" % (title,) - - -convert_call_list = ( - ("Converting Users", convert_users), - ("Converting Media Entries", convert_media_entries), - ("Converting Media Data for Images", convert_image), - ("Cnnverting Media Data for Videos", convert_video), - ("Converting Tags for Media", convert_media_tags), - ("Converting Media Comments", convert_media_comments), - ) - -sql_call_list = ( - ("Filling Migration Tables", convert_add_migration_versions), - ("Analyzing/Cleaning SQL Data", cleanup_sql_tables), - ) - -def run_conversion(config_name): - global_config, app_config = setup_global_and_app_config(config_name) - - sql_conn, sql_db = sql_connect(app_config) - mk_conn, mk_db = mongo_connect(app_config) - - Base_v0.metadata.create_all(sql_db.engine) - - for title, func in convert_call_list: - print_header(title) - func(mk_db) - Session.remove() - - for title, func in sql_call_list: - print_header(title) - func(sql_db) - Session.remove() - - -if __name__ == '__main__': - run_conversion("mediagoblin.ini") diff --git a/mediagoblin/db/sql/open.py b/mediagoblin/db/sql/open.py index 9db21c56..801c92ea 100644 --- a/mediagoblin/db/sql/open.py +++ b/mediagoblin/db/sql/open.py @@ -71,7 +71,7 @@ def setup_connection_and_db_from_config(app_config): # logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) Session.configure(bind=engine) - 
return "dummy conn", DatabaseMaster(engine) + return DatabaseMaster(engine) def check_db_migrations_current(db): diff --git a/mediagoblin/db/sql/util.py b/mediagoblin/db/sql/util.py index f6a3dc17..e83fbab2 100644 --- a/mediagoblin/db/sql/util.py +++ b/mediagoblin/db/sql/util.py @@ -322,6 +322,6 @@ def check_collection_slug_used(dummy_db, creator_id, slug, ignore_c_id): if __name__ == '__main__': from mediagoblin.db.sql.open import setup_connection_and_db_from_config - conn,db = setup_connection_and_db_from_config({'sql_engine':'sqlite:///mediagoblin.db'}) + db = setup_connection_and_db_from_config({'sql_engine':'sqlite:///mediagoblin.db'}) clean_orphan_tags() diff --git a/mediagoblin/db/sql_switch.py b/mediagoblin/db/sql_switch.py deleted file mode 100644 index 571adbdb..00000000 --- a/mediagoblin/db/sql_switch.py +++ /dev/null @@ -1 +0,0 @@ -use_sql = True diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py index a8c8c92b..9fa3b204 100644 --- a/mediagoblin/db/util.py +++ b/mediagoblin/db/util.py @@ -14,16 +14,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -try: - from mediagoblin.db.sql_switch import use_sql -except ImportError: - use_sql = False - -if use_sql: - from mediagoblin.db.sql.fake import ObjectId, InvalidId, DESCENDING - from mediagoblin.db.sql.util import atomic_update, check_media_slug_used, \ - media_entries_for_tag_slug, check_collection_slug_used -else: - from mediagoblin.db.mongo.util import \ - ObjectId, InvalidId, DESCENDING, atomic_update, \ - check_media_slug_used, media_entries_for_tag_slug +#TODO: check now after mongo removal if we can't rip out a layer of abstraction +from mediagoblin.db.sql.fake import ObjectId, InvalidId, DESCENDING +from mediagoblin.db.sql.util import atomic_update, check_media_slug_used, \ + media_entries_for_tag_slug, check_collection_slug_used diff --git a/mediagoblin/gmg_commands/__init__.py b/mediagoblin/gmg_commands/__init__.py index 19793ec3..6aed4f6c 100644 --- a/mediagoblin/gmg_commands/__init__.py +++ b/mediagoblin/gmg_commands/__init__.py @@ -25,11 +25,6 @@ SUBCOMMAND_MAP = { 'setup': 'mediagoblin.gmg_commands.shell:shell_parser_setup', 'func': 'mediagoblin.gmg_commands.shell:shell', 'help': 'Run a shell with some tools pre-setup'}, - 'migrate': { - 'setup': 'mediagoblin.gmg_commands.migrate:migrate_parser_setup', - 'func': 'mediagoblin.gmg_commands.migrate:migrate', - 'help': ('Migrate your Mongo database. ' - '[DEPRECATED!] 
use convert_mongo_to_sql and dbupdate.')}, 'adduser': { 'setup': 'mediagoblin.gmg_commands.users:adduser_parser_setup', 'func': 'mediagoblin.gmg_commands.users:adduser', @@ -46,10 +41,6 @@ SUBCOMMAND_MAP = { 'setup': 'mediagoblin.gmg_commands.dbupdate:dbupdate_parse_setup', 'func': 'mediagoblin.gmg_commands.dbupdate:dbupdate', 'help': 'Set up or update the SQL database'}, - 'convert_mongo_to_sql': { - 'setup': 'mediagoblin.gmg_commands.mongosql:mongosql_parser_setup', - 'func': 'mediagoblin.gmg_commands.mongosql:mongosql', - 'help': 'Convert Mongo DB data to SQL DB data'}, 'theme': { 'setup': 'mediagoblin.gmg_commands.theme:theme_parser_setup', 'func': 'mediagoblin.gmg_commands.theme:theme', diff --git a/mediagoblin/gmg_commands/dbupdate.py b/mediagoblin/gmg_commands/dbupdate.py index 67fdd69c..4b301178 100644 --- a/mediagoblin/gmg_commands/dbupdate.py +++ b/mediagoblin/gmg_commands/dbupdate.py @@ -114,7 +114,7 @@ def run_dbupdate(app_config, global_config): global_config.get('plugins', {}).keys()) # Set up the database - connection, db = setup_connection_and_db_from_config(app_config) + db = setup_connection_and_db_from_config(app_config) Session = sessionmaker(bind=db.engine) diff --git a/mediagoblin/gmg_commands/import_export.py b/mediagoblin/gmg_commands/import_export.py index 72ebd8a8..d51a1e3e 100644 --- a/mediagoblin/gmg_commands/import_export.py +++ b/mediagoblin/gmg_commands/import_export.py @@ -105,7 +105,7 @@ def env_import(args): setup_storage() global_config, app_config = setup_global_and_app_config(args.conf_file) - connection, db = setup_connection_and_db_from_config( + db = setup_connection_and_db_from_config( app_config) tf = tarfile.open( @@ -243,8 +243,7 @@ def env_export(args): setup_storage() - connection, db = setup_connection_and_db_from_config( - app_config) + db = setup_connection_and_db_from_config(app_config) _export_database(db, args) diff --git a/mediagoblin/gmg_commands/migrate.py b/mediagoblin/gmg_commands/migrate.py deleted file mode 100644 index b915a528..00000000 --- a/mediagoblin/gmg_commands/migrate.py +++ /dev/null @@ -1,75 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import sys - -from mediagoblin.init import setup_global_and_app_config - - -def migrate_parser_setup(subparser): - pass - - -def _print_started_migration(migration_number, migration_func): - sys.stdout.write( - "Running migration %s, '%s'... " % ( - migration_number, migration_func.func_name)) - sys.stdout.flush() - - -def _print_finished_migration(migration_number, migration_func): - sys.stdout.write("done.\n") - sys.stdout.flush() - - -def migrate(args): - run_migrate(args.conf_file) - - -def run_migrate(conf_file): - # This MUST be imported so as to set up the appropriate migrations! 
- from mediagoblin.db.mongo import migrations - - from mediagoblin.db.mongo import util as db_util - from mediagoblin.db.mongo.open import setup_connection_and_db_from_config - - global_config, app_config = setup_global_and_app_config(conf_file) - - connection, db = setup_connection_and_db_from_config( - app_config, use_pymongo=True) - migration_manager = db_util.MigrationManager(db) - - # Clear old indexes - print "== Clearing old indexes... ==" - removed_indexes = db_util.remove_deprecated_indexes(db) - - for collection, index_name in removed_indexes: - print "Removed index '%s' in collection '%s'" % ( - index_name, collection) - - # Migrate - print "\n== Applying migrations... ==" - migration_manager.migrate_new( - pre_callback=_print_started_migration, - post_callback=_print_finished_migration) - - # Add new indexes - print "\n== Adding new indexes... ==" - new_indexes = db_util.add_new_indexes(db) - - for collection, index_name in new_indexes: - print "Added index '%s' to collection '%s'" % ( - index_name, collection) diff --git a/mediagoblin/gmg_commands/mongosql.py b/mediagoblin/gmg_commands/mongosql.py deleted file mode 100644 index dd53f575..00000000 --- a/mediagoblin/gmg_commands/mongosql.py +++ /dev/null @@ -1,28 +0,0 @@ -# GNU MediaGoblin -- federated, autonomous media hosting -# Copyright (C) 2012 MediaGoblin contributors. See AUTHORS. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - - -def mongosql_parser_setup(subparser): - pass - - -def mongosql(args): - # First, make sure our mongo migrations are up to date... 
- from mediagoblin.gmg_commands.migrate import run_migrate - run_migrate(args.conf_file) - - from mediagoblin.db.sql.convert import run_conversion - run_conversion(args.conf_file) diff --git a/mediagoblin/init/__init__.py b/mediagoblin/init/__init__.py index 8d70c4ef..ab6e6399 100644 --- a/mediagoblin/init/__init__.py +++ b/mediagoblin/init/__init__.py @@ -66,15 +66,13 @@ def setup_database(): load_models(app_config) # Set up the database - connection, db = setup_connection_and_db_from_config(app_config) + db = setup_connection_and_db_from_config(app_config) check_db_migrations_current(db) - setup_globals( - db_connection=connection, - database=db) + setup_globals(database=db) - return connection, db + return db def get_jinja_loader(user_template_path=None, current_theme=None, diff --git a/mediagoblin/listings/views.py b/mediagoblin/listings/views.py index a8824390..3064096c 100644 --- a/mediagoblin/listings/views.py +++ b/mediagoblin/listings/views.py @@ -36,10 +36,6 @@ def _get_tag_name_from_entries(media_entries, tag_slug): tag_name = tag['name'] break break - # TODO: Remove after SQL-switch, it's mongo specific - if hasattr(media_entries, "rewind"): - media_entries.rewind() - return tag_name diff --git a/mediagoblin/mg_globals.py b/mediagoblin/mg_globals.py index 356a944d..8c7c64c2 100644 --- a/mediagoblin/mg_globals.py +++ b/mediagoblin/mg_globals.py @@ -26,10 +26,7 @@ import threading # General mediagoblin globals ############################# -# mongokit.Connection -db_connection = None - -# mongokit.Connection +# SQL database engine database = None # beaker's cache manager diff --git a/mediagoblin/templates/mediagoblin/utils/object_gallery.html b/mediagoblin/templates/mediagoblin/utils/object_gallery.html index 0cd8cdab..d328b552 100644 --- a/mediagoblin/templates/mediagoblin/utils/object_gallery.html +++ b/mediagoblin/templates/mediagoblin/utils/object_gallery.html @@ -47,7 +47,7 @@ Args: - request: Request - - media_entries: pymongo cursor of media entries + - media_entries: db cursor of media entries - pagination: Paginator object - pagination_base_url: If you want the pagination to point to a different URL, point it here diff --git a/mediagoblin/tests/test_globals.py b/mediagoblin/tests/test_globals.py index 98f6a436..303f89e2 100644 --- a/mediagoblin/tests/test_globals.py +++ b/mediagoblin/tests/test_globals.py @@ -18,23 +18,20 @@ from nose.tools import assert_raises from mediagoblin import mg_globals + class TestGlobals(object): def setUp(self): - self.old_connection = mg_globals.db_connection self.old_database = mg_globals.database def tearDown(self): - mg_globals.db_connection = self.old_connection mg_globals.database = self.old_database def test_setup_globals(self): mg_globals.setup_globals( - db_connection='my favorite db_connection!', database='my favorite database!', public_store='my favorite public_store!', queue_store='my favorite queue_store!') - assert mg_globals.db_connection == 'my favorite db_connection!' assert mg_globals.database == 'my favorite database!' assert mg_globals.public_store == 'my favorite public_store!' assert mg_globals.queue_store == 'my favorite queue_store!' diff --git a/mediagoblin/tools/pagination.py b/mediagoblin/tools/pagination.py index af889abd..4c9b3118 100644 --- a/mediagoblin/tools/pagination.py +++ b/mediagoblin/tools/pagination.py @@ -25,7 +25,7 @@ PAGINATION_DEFAULT_PER_PAGE = 30 class Pagination(object): """ - Pagination class for mongodb queries. + Pagination class for database queries. 
Initialization through __init__(self, cursor, page=1, per_page=2), get actual data slice through __call__().
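The calling convention the docstring describes is unchanged by the switch: Pagination still wraps a cursor-like object that supports counting and slicing, only now that object is a SQLAlchemy query rather than a pymongo cursor. A hedged sketch of front-page style usage (MediaEntry.query and the processed/created filtering are assumptions based on the SQL models and the old index definitions above):

    from mediagoblin.db.sql.models import MediaEntry
    from mediagoblin.tools.pagination import Pagination

    # Newest processed media first, mirroring the old 'created' Mongo index.
    cursor = MediaEntry.query.filter_by(state=u'processed') \
                             .order_by(MediaEntry.created.desc())

    pagination = Pagination(page=1, cursor=cursor)
    media_entries = pagination()   # the slice of entries for the requested page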