callback = Column(Unicode, nullable=False, default=u"oob")
created = Column(DateTime, nullable=False, default=datetime.datetime.now)
updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
-
+
class AccessToken(Base):
"""
Model for representing the access tokens
request_token = Column(Unicode, ForeignKey(RequestToken.token))
created = Column(DateTime, nullable=False, default=datetime.datetime.now)
updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
-
+
class NonceTimestamp(Base):
"""
[ProcessingNotification, CommentNotification])
MODELS = [
- User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
- MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
+ User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
+ MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
MediaAttachmentFile, ProcessingMetaData, Notification, CommentNotification,
ProcessingNotification, CommentSubscription]
"""
- Foundations are the default rows that are created immediately after the tables
+ Foundations are the default rows that are created immediately after the tables
are initialized. Each entry to this dictionary should be in the format of:
ModelConstructorObject:List of Dictionaries
(Each Dictionary represents a row on the Table to be created, containing each
return u"%s:%s" % (
self.__class__.__module__, self.__class__.__name__)
- def __init__(self, *args, **kwargs):
- # next line is REQUIRED to have pickable exceptions if you want
- # to be able to pass in custom arguments (see celery docs)
-        Exception.__init__(self, *args, **kwargs)
+ def __init__(self, **metadata):
+ self.metadata = metadata or {}
class BadMediaFail(BaseProcessingFail):
"""
import urllib
import urllib2
-#TODO: newer celeries use from celery import Task. Change when we upgrade
-from celery.task import Task
+import celery
from celery.registry import tasks
from mediagoblin import mg_globals as mgg
-from mediagoblin.db.sql.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from mediagoblin.db.models import MediaEntry
+from mediagoblin.processing import (mark_entry_failed, BaseProcessingFail,
+ ProcessingState)
from mediagoblin.tools.processing import json_processing_callback
_log = logging.getLogger(__name__)
_log.setLevel(logging.DEBUG)
-@task.task(default_retry_delay=2 * 60)
+@celery.task(default_retry_delay=2 * 60)
def handle_push_urls(feed_url):
"""Subtask, notifying the PuSH servers of new content
'Giving up.'.format(feed_url))
return False
+
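The import block now goes through the module-level API (`import celery`) instead of the old `celery.task` module, and function tasks are decorated with `@celery.task(...)`. A small sketch of a task in that style, assuming the Celery version targeted here (where `celery.task` works as a module-level decorator); `notify_server` is a hypothetical helper, not MediaGoblin code:

import celery

# Sketch only: new-style decorator usage with a retry delay, mirroring
# handle_push_urls above.
@celery.task(default_retry_delay=2 * 60)
def ping_feed(feed_url):
    try:
        notify_server(feed_url)   # hypothetical helper, not MediaGoblin code
    except IOError as exc:
        # retry later instead of failing hard, as handle_push_urls does
        return ping_feed.retry(exc=exc, throw=False)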
################################
# Media processing initial steps
################################
-class ProcessMedia(Task):
+class ProcessMedia(celery.Task):
+ """
+ Pass this entry off for processing.
+ """
track_started=True
def run(self, media_id):
# Try to process, and handle expected errors.
try:
entry.state = u'processing'
- entry.queued_task_id = self.request.id
entry.save()
_log.debug('Processing {0}'.format(entry))
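With the switch above, media processing becomes a class-based task: celery instantiates the `celery.Task` subclass and calls its run() method, and overridable hooks such as on_failure() give one place to handle errors. A minimal sketch of the pattern with a hypothetical task, not the actual ProcessMedia body:

import celery

# Sketch of the class-based pattern; names and bodies are illustrative.
class ExampleProcessTask(celery.Task):
    track_started = True

    def run(self, item_id):
        # celery calls run() with the positional arguments handed to
        # apply_async(); ProcessMedia looks up the MediaEntry at this point
        print('processing item {0}'.format(item_id))

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # invoked by celery if run() raises; a class-based task can
        # centralise failure handling in a hook like this
        print('item {0} failed: {1}'.format(args[0], exc))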
from mediagoblin.db.models import MediaEntry
from mediagoblin.processing import mark_entry_failed
-from mediagoblin.processing.task import process_media
+from mediagoblin.processing.task import ProcessMedia
_log = logging.getLogger(__name__)
'mediagoblin.user_pages.atom_feed',qualified=True,
user=request.user.username)`"""
try:
- process_media.apply_async(
+ ProcessMedia().apply_async(
[entry.id, feed_url], {},
task_id=entry.queued_task_id)
except BaseException as exc:
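Because the task no longer records its own id on the entry (the removed entry.queued_task_id = self.request.id line above), the caller is the one pairing the MediaEntry row with the celery task it dispatches. A hedged sketch of that wiring, assuming the caller generates the id up front; entry, feed_url and ProcessMedia stand for the objects in the surrounding code, and the uuid usage is illustrative:

import uuid

# Sketch of the caller-side wiring around the hunk above.
entry.queued_task_id = unicode(uuid.uuid4())   # remember the id up front
entry.save()

ProcessMedia().apply_async(
    [entry.id, feed_url], {},                  # positional args, keyword args
    task_id=entry.queued_task_id)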