From: Rodney Ewing
Date: Mon, 19 Aug 2013 19:58:00 +0000 (-0700)
Subject: -update to latest master
X-Git-Url: https://vcs.fsf.org/?a=commitdiff_plain;h=b505952508f717e0d4f59d24c87a54ef42673c3f;p=mediagoblin.git

-update to latest master

- have mg generate task_id remove
---
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index f0cbce2a..9cb39ff4 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -146,7 +146,7 @@ class RequestToken(Base):
     callback = Column(Unicode, nullable=False, default=u"oob")
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
- 
+
 class AccessToken(Base):
     """
         Model for representing the access tokens
@@ -159,7 +159,7 @@ class AccessToken(Base):
     request_token = Column(Unicode, ForeignKey(RequestToken.token))
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
- 
+
 
 class NonceTimestamp(Base):
     """
@@ -646,13 +646,13 @@ with_polymorphic(
     [ProcessingNotification, CommentNotification])
 
 MODELS = [
-    User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag, 
-    MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames, 
+    User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
+    MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
     MediaAttachmentFile, ProcessingMetaData, Notification, CommentNotification,
     ProcessingNotification, CommentSubscription]
 
 """
-Foundations are the default rows that are created immediately after the tables 
+Foundations are the default rows that are created immediately after the tables
 are initialized. Each entry to this dictionary should be in the format of:
     ModelConstructorObject:List of Dictionaries
     (Each Dictionary represents a row on the Table to be created, containing each
diff --git a/mediagoblin/processing/__init__.py b/mediagoblin/processing/__init__.py
index ae3652cf..454eb09b 100644
--- a/mediagoblin/processing/__init__.py
+++ b/mediagoblin/processing/__init__.py
@@ -181,10 +181,8 @@ class BaseProcessingFail(Exception):
         return u"%s:%s" % (
             self.__class__.__module__,
             self.__class__.__name__)
-    def __init__(self, *args, **kwargs):
-        # next line is REQUIRED to have pickable exceptions if you want
-        # to be able to pass in custom arguments (see celery docs)
-        Exception.__init__(self, *args, **metadata)
+    def __init__(self, **metadata):
+        self.metadata = metadata or {}
 
 class BadMediaFail(BaseProcessingFail):
     """
diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py
index 550906d0..bb09daec 100644
--- a/mediagoblin/processing/task.py
+++ b/mediagoblin/processing/task.py
@@ -18,13 +18,13 @@ import logging
 import urllib
 import urllib2
 
-#TODO: newer celeries use from celery import Task. Change when we upgrade
-from celery.task import Task
+import celery
 from celery.registry import tasks
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.db.sql.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from mediagoblin.db.models import MediaEntry
+from mediagoblin.processing import (mark_entry_failed, BaseProcessingFail,
+                                    ProcessingState)
 from mediagoblin.tools.processing import json_processing_callback
 
 _log = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ logging.basicConfig()
 _log.setLevel(logging.DEBUG)
 
 
-@task.task(default_retry_delay=2 * 60)
+@celery.task(default_retry_delay=2 * 60)
 def handle_push_urls(feed_url):
     """Subtask, notifying the PuSH servers of new content
 
@@ -62,10 +62,14 @@ def handle_push_urls(feed_url):
                      'Giving up.'.format(feed_url))
             return False
 
+
 ################################
 # Media processing initial steps
 ################################
-class ProcessMedia(Task):
+class ProcessMedia(celery.Task):
+    """
+    Pass this entry off for processing.
+    """
     track_started=True
 
     def run(self, media_id):
@@ -81,7 +85,6 @@ class ProcessMedia(Task):
         # Try to process, and handle expected errors.
         try:
             entry.state = u'processing'
-            entry.queued_task_id = self.request.id
             entry.save()
 
             _log.debug('Processing {0}'.format(entry))
diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index 7e85696b..33687a72 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -21,7 +21,7 @@ from werkzeug.datastructures import FileStorage
 
 from mediagoblin.db.models import MediaEntry
 from mediagoblin.processing import mark_entry_failed
-from mediagoblin.processing.task import process_media
+from mediagoblin.processing.task import ProcessMedia
 
 _log = logging.getLogger(__name__)
 
@@ -85,7 +85,7 @@ def run_process_media(entry, feed_url=None):
              'mediagoblin.user_pages.atom_feed',qualified=True,
              user=request.user.username)`"""
     try:
-        process_media.apply_async(
+        ProcessMedia().apply_async(
             [entry.id, feed_url], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
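
The task.py and submit/lib.py hunks together change who picks the task id: the old
entry.queued_task_id = self.request.id assignment inside the celery task is removed,
and run_process_media() now hands a pre-recorded entry.queued_task_id to
ProcessMedia().apply_async(). The sketch below (Python 2, matching the codebase's urllib2
and u'' usage) only illustrates that calling convention; it is not the actual MediaGoblin
code. The helper name queue_for_processing and the uuid-based id generation are assumptions
drawn from the commit message ("have mg generate task_id"), since this diff does not show
where queued_task_id is assigned.

    import uuid

    from mediagoblin.processing.task import ProcessMedia


    def queue_for_processing(entry, feed_url=None):
        # Assumption: MediaGoblin (not celery) chooses the task id up front and
        # records it on the entry, so the web process can refer to the task later.
        entry.queued_task_id = unicode(uuid.uuid4())
        entry.save()

        # ProcessMedia is now a celery.Task subclass, so instantiate it and pass
        # the pre-generated id to apply_async() instead of letting celery assign
        # one and copying it back from self.request.id inside run().
        ProcessMedia().apply_async(
            [entry.id, feed_url], {},
            task_id=entry.queued_task_id)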