X-Git-Url: https://vcs.fsf.org/?a=blobdiff_plain;f=mediagoblin%2Fsubmit%2Fviews.py;h=ad9fedae1263db70b2e27444f649fcc57123d951;hb=86bb44ef121e64e2a2c7ad175af444000a7ca0c9;hp=cdd097ecc5c0f71af1024de99e72fb083a819d3b;hpb=cf29e8a824e0ef4612f1144f079c80c1d20b89e5;p=mediagoblin.git

diff --git a/mediagoblin/submit/views.py b/mediagoblin/submit/views.py
index cdd097ec..ad9fedae 100644
--- a/mediagoblin/submit/views.py
+++ b/mediagoblin/submit/views.py
@@ -14,29 +14,27 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+from mediagoblin import messages
 import mediagoblin.mg_globals as mg_globals
 import uuid
 
 from os.path import splitext
-from cgi import FieldStorage
 
-from celery import registry
-import urllib,urllib2
 import logging
 _log = logging.getLogger(__name__)
 
 from werkzeug.utils import secure_filename
+from werkzeug.datastructures import FileStorage
 
-from mediagoblin.db.util import ObjectId
-from mediagoblin.tools.text import cleaned_markdown_conversion, convert_to_tag_list_of_dicts
+from mediagoblin.tools.text import convert_to_tag_list_of_dicts
 from mediagoblin.tools.translate import pass_to_ugettext as _
 from mediagoblin.tools.response import render_to_response, redirect
 from mediagoblin.decorators import require_active_login
-from mediagoblin.submit import forms as submit_forms, security
-from mediagoblin.processing import mark_entry_failed, ProcessMedia
+from mediagoblin.submit import forms as submit_forms
 from mediagoblin.messages import add_message, SUCCESS
-from mediagoblin.media_types import get_media_type_and_manager, \
+from mediagoblin.media_types import sniff_media, \
     InvalidFileType, FileTypeNotSupported
+from mediagoblin.submit.lib import handle_push_urls, run_process_media
 
 
 @require_active_login
@@ -44,47 +42,55 @@ def submit_start(request):
     """
     First view for submitting a file.
""" - submit_form = submit_forms.SubmitStartForm(request.POST) + submit_form = submit_forms.SubmitStartForm(request.form) if request.method == 'POST' and submit_form.validate(): - if not ('file' in request.POST - and isinstance(request.POST['file'], FieldStorage) - and request.POST['file'].file): + if not ('file' in request.files + and isinstance(request.files['file'], FileStorage) + and request.files['file'].stream): submit_form.file.errors.append( _(u'You must provide a file.')) else: try: - filename = request.POST['file'].filename - media_type, media_manager = get_media_type_and_manager(filename) + filename = request.files['file'].filename + + # Sniff the submitted media to determine which + # media plugin should handle processing + media_type, media_manager = sniff_media( + request.files['file']) # create entry and save in database entry = request.db.MediaEntry() - entry['_id'] = ObjectId() entry.media_type = unicode(media_type) entry.title = ( - unicode(request.POST['title']) + unicode(request.form['title']) or unicode(splitext(filename)[0])) - entry.description = unicode(request.POST.get('description')) - entry.description_html = cleaned_markdown_conversion( - entry.description) + entry.description = unicode(request.form.get('description')) - entry.license = unicode(request.POST.get('license', "")) or None + entry.license = unicode(request.form.get('license', "")) or None - entry.uploader = request.user._id + entry.uploader = request.user.id # Process the user's folksonomy "tags" entry.tags = convert_to_tag_list_of_dicts( - request.POST.get('tags')) + request.form.get('tags')) # Generate a slug from the title entry.generate_slug() + # We generate this ourselves so we know what the taks id is for + # retrieval later. + + # (If we got it off the task's auto-generation, there'd be + # a risk of a race condition when we'd save after sending + # off the task) + task_id = unicode(uuid.uuid4()) # Now store generate the queueing related filename queue_filepath = request.app.queue_store.get_unique_filepath( ['media_entries', - unicode(entry._id), + task_id, secure_filename(filename)]) # queue appropriately @@ -92,68 +98,23 @@ def submit_start(request): queue_filepath, 'wb') with queue_file: - queue_file.write(request.POST['file'].file.read()) + queue_file.write(request.files['file'].stream.read()) # Add queued filename to the entry entry.queued_media_file = queue_filepath - # We generate this ourselves so we know what the taks id is for - # retrieval later. - - # (If we got it off the task's auto-generation, there'd be - # a risk of a race condition when we'd save after sending - # off the task) - task_id = unicode(uuid.uuid4()) - entry['queued_task_id'] = task_id + entry.queued_task_id = task_id # Save now so we have this data before kicking off processing - entry.save(validate=True) + entry.save() # Pass off to processing # # (... don't change entry after this point to avoid race # conditions with changes to the document via processing code) - process_media = registry.tasks[ProcessMedia.name] - try: - process_media.apply_async( - [unicode(entry._id)], {}, - task_id=task_id) - except BaseException as exc: - # The purpose of this section is because when running in "lazy" - # or always-eager-with-exceptions-propagated celery mode that - # the failure handling won't happen on Celery end. Since we - # expect a lot of users to run things in this way we have to - # capture stuff here. - # - # ... 
-                    # exception is re-raised :)
-                    mark_entry_failed(entry._id, exc)
-                    # re-raise the exception
-                    raise
+                run_process_media(entry)
 
-                if mg_globals.app_config["push_urls"]:
-                    feed_url=request.urlgen(
-                        'mediagoblin.user_pages.atom_feed',
-                        qualified=True,user=request.user.username)
-                    hubparameters = {
-                        'hub.mode': 'publish',
-                        'hub.url': feed_url}
-                    hubdata = urllib.urlencode(hubparameters)
-                    hubheaders = {
-                        "Content-type": "application/x-www-form-urlencoded",
-                        "Connection": "close"}
-                    for huburl in mg_globals.app_config["push_urls"]:
-                        hubrequest = urllib2.Request(huburl, hubdata, hubheaders)
-                        try:
-                            hubresponse = urllib2.urlopen(hubrequest)
-                        except urllib2.HTTPError as exc:
-                            # This is not a big issue, the item will be fetched
-                            # by the PuSH server next time we hit it
-                            _log.warning(
-                                "push url %r gave error %r", huburl, exc.code)
-                        except urllib2.URLError as exc:
-                            _log.warning(
-                                "push url %r is unreachable %r", huburl, exc.reason)
+                handle_push_urls(request)
 
                 add_message(request, SUCCESS, _('Woohoo! Submitted!'))
 
@@ -161,10 +122,9 @@ def submit_start(request):
                     user=request.user.username)
             except Exception as e:
                 '''
-                This section is intended to catch exceptions raised in
-                mediagobling.media_types
+                This section is intended to catch exceptions raised in
+                mediagoblin.media_types
                 '''
-
                 if isinstance(e, InvalidFileType) or \
                         isinstance(e, FileTypeNotSupported):
                     submit_form.file.errors.append(
@@ -177,3 +137,44 @@
         'mediagoblin/submit/start.html',
         {'submit_form': submit_form,
          'app_config': mg_globals.app_config})
+
+
+@require_active_login
+def add_collection(request, media=None):
+    """
+    View to create a new collection
+    """
+    submit_form = submit_forms.AddCollectionForm(request.form)
+
+    if request.method == 'POST' and submit_form.validate():
+        try:
+            collection = request.db.Collection()
+
+            collection.title = unicode(request.form['title'])
+            collection.description = unicode(request.form.get('description'))
+            collection.creator = request.user.id
+            collection.generate_slug()
+
+            # Make sure this user isn't duplicating an existing collection
+            existing_collection = request.db.Collection.find_one({
+                'creator': request.user.id,
+                'title': collection.title})
+
+            if existing_collection:
+                messages.add_message(
+                    request, messages.ERROR, _('You already have a collection called "%s"!' % collection.title))
+            else:
+                collection.save()
+
+                add_message(request, SUCCESS, _('Collection "%s" added!' % collection.title))
+
+            return redirect(request, "mediagoblin.user_pages.user_home",
+                user=request.user.username)
+
+        except Exception as e:
+            raise
+
+    return render_to_response(
+        request,
+        'mediagoblin/submit/collection.html',
+        {'submit_form': submit_form,
+         'app_config': mg_globals.app_config})
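
Note: the Celery dispatch and PubSubHubbub (PuSH) ping code deleted from the view
above does not disappear; it moves behind the two helpers imported at the top,
run_process_media and handle_push_urls from mediagoblin.submit.lib. The sketch
below is reconstructed from the removed lines purely as a reading aid, not as the
actual contents of submit/lib.py; in particular, reusing entry.queued_task_id as
the Celery task id and the exact signatures are assumptions.

# Hypothetical sketch of the helpers in mediagoblin/submit/lib.py, pieced
# together from the code this diff removes from the view.
import logging
import urllib
import urllib2

from celery import registry

import mediagoblin.mg_globals as mg_globals
from mediagoblin.processing import mark_entry_failed, ProcessMedia

_log = logging.getLogger(__name__)


def run_process_media(entry):
    """Dispatch a ProcessMedia task for this entry to Celery."""
    process_media = registry.tasks[ProcessMedia.name]
    try:
        process_media.apply_async(
            [unicode(entry.id)], {},
            # The view stored a fresh uuid4 in entry.queued_task_id before
            # calling us; reusing it as the Celery task id is an assumption.
            task_id=entry.queued_task_id)
    except BaseException as exc:
        # In "lazy" / always-eager Celery modes the failure handling never
        # runs on the Celery side, so mark the entry failed here and
        # re-raise (not quite the diaper pattern).
        mark_entry_failed(entry.id, exc)
        raise


def handle_push_urls(request):
    """Ping any configured PuSH hubs about the user's updated Atom feed."""
    if not mg_globals.app_config["push_urls"]:
        return

    feed_url = request.urlgen(
        'mediagoblin.user_pages.atom_feed',
        qualified=True, user=request.user.username)
    hubparameters = {
        'hub.mode': 'publish',
        'hub.url': feed_url}
    hubdata = urllib.urlencode(hubparameters)
    hubheaders = {
        "Content-type": "application/x-www-form-urlencoded",
        "Connection": "close"}
    for huburl in mg_globals.app_config["push_urls"]:
        hubrequest = urllib2.Request(huburl, hubdata, hubheaders)
        try:
            urllib2.urlopen(hubrequest)
        except urllib2.HTTPError as exc:
            # Not fatal: the hub will fetch the item next time we ping it.
            _log.warning("push url %r gave error %r", huburl, exc.code)
        except urllib2.URLError as exc:
            _log.warning("push url %r is unreachable %r", huburl, exc.reason)

With helpers along these lines in place, the view body reduces to
run_process_media(entry) followed by handle_push_urls(request), exactly as the
new hunks above show.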