Make changes to display the page listing all the blogs created by a user.
diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py
index b29de9bd89883656558c7384b83983e67f33256c..7f68348566b5897b68d23c0c4a8b634c08b07edd 100644
--- a/mediagoblin/processing/task.py
+++ b/mediagoblin/processing/task.py
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import logging
+import urllib
+import urllib2
 
-from celery.task import Task
+import celery
+from celery.registry import tasks
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.db.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from . import mark_entry_failed, BaseProcessingFail
 from mediagoblin.tools.processing import json_processing_callback
+from mediagoblin.processing import get_entry_and_processing_manager
 
 _log = logging.getLogger(__name__)
 logging.basicConfig()
 _log.setLevel(logging.DEBUG)
 
 
+@celery.task(default_retry_delay=2 * 60)
+def handle_push_urls(feed_url):
+    """Subtask, notifying the PuSH servers of new content
+
+    When run in a separate process, retries up to 3 times
+    (every 2 minutes) before failing."""
+    if not mgg.app_config["push_urls"]:
+        return # Nothing to do
+    _log.debug('Notifying Push servers for feed {0}'.format(feed_url))
+    hubparameters = {
+        'hub.mode': 'publish',
+        'hub.url': feed_url}
+    hubdata = urllib.urlencode(hubparameters)
+    hubheaders = {
+        "Content-type": "application/x-www-form-urlencoded",
+        "Connection": "close"}
+    for huburl in mgg.app_config["push_urls"]:
+        hubrequest = urllib2.Request(huburl, hubdata, hubheaders)
+        try:
+            hubresponse = urllib2.urlopen(hubrequest)
+        except (urllib2.HTTPError, urllib2.URLError) as exc:
+            # We retry by default 3 times before failing
+            _log.info("PuSH url %r gave error %r", huburl, exc)
+            try:
+                return handle_push_urls.retry(exc=exc, throw=False)
+            except Exception:
+                # All retries failed; failure is no tragedy here, probably.
+                _log.warn('Failed to notify PuSH server for feed {0}. '
+                          'Giving up.'.format(feed_url))
+                return False
+
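For context, a minimal sketch of how this subtask is meant to be queued out-of-band once processing succeeds (it mirrors the call made in ProcessMedia.run further down); the feed URL value is purely illustrative, and a configured Celery broker and worker are assumed:

    # Sketch only: queue the PuSH notification as a background task;
    # retries on HTTP errors are handled inside handle_push_urls itself.
    feed_url = 'http://example.org/u/someuser/atom/'  # hypothetical feed URL
    if mgg.app_config["push_urls"] and feed_url:
        handle_push_urls.subtask().delay(feed_url)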
+
 ################################
 # Media processing initial steps
 ################################
-
-class ProcessMedia(Task):
+class ProcessMedia(celery.Task):
     """
     Pass this entry off for processing.
     """
-    def run(self, media_id):
+    def run(self, media_id, feed_url, reprocess_action, reprocess_info=None):
         """
         Pass the media entry off to the appropriate processing function
         (for now just process_image...)
+
+        :param feed_url: The feed URL that the PuSH server needs to be
+            updated for.
+        :param reprocess_action: The name of the processing action to run,
+            e.g. the initial run or a specific reprocessing step.
+        :param reprocess_info: A dict containing all of the necessary
+            reprocessing info for the media_type.
         """
-        entry = MediaEntry.query.get(media_id)
+        reprocess_info = reprocess_info or {}
+        entry, manager = get_entry_and_processing_manager(media_id)
 
         # Try to process, and handle expected errors.
         try:
-            entry.state = u'processing'
-            entry.save()
-
-            _log.debug('Processing {0}'.format(entry))
-
-            # run the processing code
-            entry.media_manager['processor'](entry)
+            processor_class = manager.get_processor(reprocess_action, entry)
+
+            with processor_class(manager, entry) as processor:
+                # Initial state change has to be here because
+                # the entry.state gets recorded on processor_class init
+                entry.state = u'processing'
+                entry.save()
+
+                _log.debug('Processing {0}'.format(entry))
+
+                try:
+                    processor.process(**reprocess_info)
+                except Exception as exc:
+                    if processor.entry_orig_state == 'processed':
+                        _log.error(
+                            'Entry {0} failed to process due to the following'
+                            ' error: {1}'.format(entry.id, exc))
+                        _log.info(
+                            'Setting entry.state back to "processed"')
+                    else:
+                        raise
 
             # We set the state to processed and save the entry here so there's
             # no need to save at the end of the processing stage, probably ;)
             entry.state = u'processed'
             entry.save()
 
+            # Notify the PuSH servers as async task
+            if mgg.app_config["push_urls"] and feed_url:
+                handle_push_urls.subtask().delay(feed_url)
+
             json_processing_callback(entry)
         except BaseProcessingFail as exc:
             mark_entry_failed(entry.id, exc)
@@ -97,3 +155,5 @@ class ProcessMedia(Task):
 
         entry = mgg.database.MediaEntry.query.filter_by(id=entry_id).first()
         json_processing_callback(entry)
+
+tasks.register(ProcessMedia)
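For reference, a rough sketch of how the registered task might be dispatched from submission code; the entry object, feed_url, and the 'initial' action name are assumptions here, and the actual call site in mediagoblin.submit may pass different arguments:

    # Sketch only: dispatch processing for a freshly queued media entry.
    from mediagoblin.processing.task import ProcessMedia

    ProcessMedia().apply_async(
        [entry.id, feed_url, 'initial', {}],  # media_id, feed_url, reprocess_action, reprocess_info
        task_id=entry.queued_task_id)         # assumes the entry records its queued task id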