Switch over media processor to use the get_workbench decorator (#565)
mediagoblin/media_types/image/processing.py
index 57eb75dbb98232fab6128f714656a9ebfab68cee..e6a34ca004e44a3f011f3fcdcba96b1d27629948 100644
@@ -1,5 +1,5 @@
 # GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 MediaGoblin contributors.  See AUTHORS.
+# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import Image
+import os
+import logging
 
-from celery.task import Task
-from celery import registry
-
-from mediagoblin.db.util import ObjectId
 from mediagoblin import mg_globals as mgg
+from mediagoblin.decorators import get_workbench
+from mediagoblin.processing import BadMediaFail, \
+    create_pub_filepath, FilenameBuilder
+from mediagoblin.tools.exif import exif_fix_image_orientation, \
+    extract_exif, clean_exif, get_gps_data, get_useful, \
+    exif_image_needs_rotation
 
-from mediagoblin.util import lazy_pass_to_ugettext as _
-
-from mediagoblin.process_media.errors import *
-
-THUMB_SIZE = 180, 180
-MEDIUM_SIZE = 640, 640
-
+_log = logging.getLogger(__name__)
 
-def create_pub_filepath(entry, filename):
-    return mgg.public_store.get_unique_filepath(
-            ['media_entries',
-             unicode(entry['_id']),
-             filename])
 
-################################
-# Media processing initial steps
-################################
-
-class ProcessMedia(Task):
+def resize_image(entry, filename, new_path, exif_tags, workdir, new_size,
+                 size_limits=(0, 0)):
     """
-    Pass this entry off for processing.
+    Store a resized version of an image in the public storage.
+
+    Arguments:
+    entry -- the entry for the image to resize
+    filename -- the filename of the original image being resized
+    new_path -- public file path for the new resized image
+    exif_tags -- EXIF data for the original image
+    workdir -- directory path for storing converted image files
+    new_size -- 2-tuple size for the resized image
+    size_limits -- optional size-limit 2-tuple (unused in this version)
     """
-    def run(self, media_id):
-        """
-        Pass the media entry off to the appropriate processing function
-        (for now just process_image...)
-        """
-        entry = mgg.database.MediaEntry.one(
-            {'_id': ObjectId(media_id)})
-
-        # Try to process, and handle expected errors.
-        try:
-            process_image(entry)
-        except BaseProcessingFail, exc:
-            mark_entry_failed(entry[u'_id'], exc)
-            return
+    try:
+        resized = Image.open(filename)
+    except IOError:
+        raise BadMediaFail()
+    resized = exif_fix_image_orientation(resized, exif_tags)  # Fix orientation
+    resized.thumbnail(new_size, Image.ANTIALIAS)
 
-        entry['state'] = u'processed'
-        entry.save()
+    # Save the new file to the conversion subdir, then copy it to storage.
+    tmp_resized_filename = os.path.join(workdir, new_path[-1])
+    with file(tmp_resized_filename, 'wb') as resized_file:
+        resized.save(resized_file)
+    mgg.public_store.copy_local_to_storage(tmp_resized_filename, new_path)
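
For context, a caller hands resize_image a locally available source file, a
public destination path, and a scratch directory. A hypothetical call, with
invented paths and the old THUMB_SIZE of (180, 180) as the size:

    exif_tags = extract_exif('/tmp/workbench/source/photo.jpg')
    thumb_path = create_pub_filepath(entry, 'photo.thumbnail.jpg')
    resize_image(entry, '/tmp/workbench/source/photo.jpg', thumb_path,
                 exif_tags, '/tmp/workbench/conversions', (180, 180))
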
 
-    def on_failure(self, exc, task_id, args, kwargs, einfo):
-        """
-        If the processing failed we should mark that in the database.
 
-        Assuming that the exception raised is a subclass of BaseProcessingFail,
-        we can use that to get more information about the failure and store that
-        for conveying information to users about the failure, etc.
-        """
-        entry_id = args[0]
-        mark_entry_failed(entry_id, exc)
+SUPPORTED_FILETYPES = ['png', 'gif', 'jpg', 'jpeg']
 
 
-process_media = registry.tasks[ProcessMedia.name]
+def sniff_handler(media_file, **kw):
+    if kw.get('media') is not None:  # That's a double negative!
+        name, ext = os.path.splitext(kw['media'].filename)
+        clean_ext = ext[1:].lower()  # Strip the . from ext and make lowercase
 
+        if clean_ext in SUPPORTED_FILETYPES:
+            _log.info('Found file extension in supported filetypes')
+            return True
+        else:
+            _log.debug('Media present, extension not found in {0}'.format(
+                    SUPPORTED_FILETYPES))
+    else:
+        _log.warning('Need additional information (keyword argument \'media\')'
+                     ' to be able to handle sniffing')
 
-def mark_entry_failed(entry_id, exc):
-    """
-    Mark a media entry as having failed in its conversion.
+    return False
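
The contract implied above is that submitters pass the uploaded file under the
'media' keyword and that it exposes a filename attribute. A minimal sketch,
with an invented stand-in class:

    class FakeUpload(object):
        """Stand-in for the real upload object."""
        filename = 'kitten.png'

    sniff_handler(None, media=FakeUpload())  # True: 'png' is supported
    sniff_handler(None)                      # False, and logs a warning
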
 
-    Uses the exception that was raised to mark more information.  If the
-    exception is a derivative of BaseProcessingFail then we can store extra
-    information that can be useful for users telling them why their media failed
-    to process.
 
-    Args:
-     - entry_id: The id of the media entry
+@get_workbench
+def process_image(entry, workbench=None):
+    """Code to process an image. Will be run by celery.
 
+    A Workbench() represents a local temporary dir. It is automatically
+    cleaned up when this function exits.
     """
-    # Was this a BaseProcessingFail?  In other words, was this a
-    # type of error that we know how to handle?
-    if isinstance(exc, BaseProcessingFail):
-        # Looks like yes, so record information about that failure and any
-        # metadata the user might have supplied.
-        mgg.database['media_entries'].update(
-            {'_id': entry_id},
-            {'$set': {u'state': u'failed',
-                      u'fail_error': exc.exception_path,
-                      u'fail_metadata': exc.metadata}})
-    else:
-        # Looks like no, so just mark it as failed and don't record a
-        # failure_error (we'll assume it wasn't handled) and don't record
-        # metadata (in fact overwrite it if somehow it had previous info
-        # here)
-        mgg.database['media_entries'].update(
-            {'_id': entry_id},
-            {'$set': {u'state': u'failed',
-                      u'fail_error': None,
-                      u'fail_metadata': {}}})
-
-
-def process_image(entry):
-    """
-    Code to process an image
-    """
-    workbench = mgg.workbench_manager.create_workbench()
-
-    queued_filepath = entry['queued_media_file']
+    # Conversions subdirectory to avoid collisions
+    conversions_subdir = os.path.join(
+        workbench.dir, 'conversions')
+    os.mkdir(conversions_subdir)
+    queued_filepath = entry.queued_media_file
     queued_filename = workbench.localized_file(
         mgg.queue_store, queued_filepath,
         'source')
+    name_builder = FilenameBuilder(queued_filename)
 
-    try:
-        thumb = Image.open(queued_filename)
-    except IOError:
-        raise BadMediaFail()
+    # EXIF extraction
+    exif_tags = extract_exif(queued_filename)
+    gps_data = get_gps_data(exif_tags)
 
-    thumb.thumbnail(THUMB_SIZE, Image.ANTIALIAS)
-    # ensure color mode is compatible with jpg
-    if thumb.mode != "RGB":
-        thumb = thumb.convert("RGB")
-
-    thumb_filepath = create_pub_filepath(entry, 'thumbnail.jpg')
-    thumb_file = mgg.public_store.get_file(thumb_filepath, 'w')
-
-    with thumb_file:
-        thumb.save(thumb_file, "JPEG", quality=90)
+    # Always create a small thumbnail
+    thumb_filepath = create_pub_filepath(
+        entry, name_builder.fill('{basename}.thumbnail{ext}'))
+    resize_image(entry, queued_filename, thumb_filepath,
+                 exif_tags, conversions_subdir,
+                 (mgg.global_config['media:thumb']['max_width'],
+                  mgg.global_config['media:thumb']['max_height']))
 
     # If the size of the original file exceeds the specified size of a `medium`
-    # file, a `medium.jpg` files is created and later associated with the media
+# file, a `.medium`-suffixed file is created and later associated with the media
     # entry.
     medium = Image.open(queued_filename)
-    medium_processed = False
+    if medium.size[0] > mgg.global_config['media:medium']['max_width'] \
+        or medium.size[1] > mgg.global_config['media:medium']['max_height'] \
+        or exif_image_needs_rotation(exif_tags):
+        medium_filepath = create_pub_filepath(
+            entry, name_builder.fill('{basename}.medium{ext}'))
+        resize_image(
+            entry, queued_filename, medium_filepath,
+            exif_tags, conversions_subdir,
+            (mgg.global_config['media:medium']['max_width'],
+             mgg.global_config['media:medium']['max_height']))
+    else:
+        medium_filepath = None
 
-    if medium.size[0] > MEDIUM_SIZE[0] or medium.size[1] > MEDIUM_SIZE[1]:
-        medium.thumbnail(MEDIUM_SIZE, Image.ANTIALIAS)
+    # Copy the original file from our workbench to its final destination
+    original_filepath = create_pub_filepath(
+            entry, name_builder.fill('{basename}{ext}'))
+    mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
 
-        if medium.mode != "RGB":
-            medium = medium.convert("RGB")
+    # Remove queued media file from storage and database
+    mgg.queue_store.delete_file(queued_filepath)
+    entry.queued_media_file = []
 
-        medium_filepath = create_pub_filepath(entry, 'medium.jpg')
-        medium_file = mgg.public_store.get_file(medium_filepath, 'w')
+    # Insert media file information into database
+    media_files_dict = entry.setdefault('media_files', {})
+    media_files_dict[u'thumb'] = thumb_filepath
+    media_files_dict[u'original'] = original_filepath
+    if medium_filepath:
+        media_files_dict[u'medium'] = medium_filepath
 
-        with medium_file:
-            medium.save(medium_file, "JPEG", quality=90)
-            medium_processed = True
+    # Insert exif data into database
+    exif_all = clean_exif(exif_tags)
 
-    # we have to re-read because unlike PIL, not everything reads
-    # things in string representation :)
-    queued_file = file(queued_filename, 'rb')
+    if len(exif_all):
+        entry.media_data_init(exif_all=exif_all)
 
-    with queued_file:
-        original_filepath = create_pub_filepath(entry, queued_filepath[-1])
+    if len(gps_data):
+        for key in list(gps_data.keys()):
+            gps_data['gps_' + key] = gps_data.pop(key)
+        entry.media_data_init(**gps_data)
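
The get_workbench decorator itself lives in mediagoblin/decorators.py and is
not shown in this patch; conceptually it absorbs the create_workbench() /
destroy_self() pair the old code managed by hand. A rough sketch of the idea,
not the actual implementation:

    import functools

    from mediagoblin import mg_globals as mgg

    def get_workbench(func):
        """Pass a fresh workbench to func and clean it up afterwards."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            workbench = mgg.workbench_manager.create_workbench()
            try:
                return func(*args, workbench=workbench, **kwargs)
            finally:
                # Mirrors the workbench.destroy_self() call removed below
                workbench.destroy_self()
        return wrapper
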
 
-        with mgg.public_store.get_file(original_filepath, 'wb') as original_file:
-            original_file.write(queued_file.read())
 
-    mgg.queue_store.delete_file(queued_filepath)
-    entry['queued_media_file'] = []
-    media_files_dict = entry.setdefault('media_files', {})
-    media_files_dict['thumb'] = thumb_filepath
-    media_files_dict['original'] = original_filepath
-    if medium_processed:
-        media_files_dict['medium'] = medium_filepath
+if __name__ == '__main__':
+    import sys
+    import pprint
+
+    pp = pprint.PrettyPrinter()
+
+    result = extract_exif(sys.argv[1])
+    gps = get_gps_data(result)
+    clean = clean_exif(result)
+    useful = get_useful(clean)
 
-    # clean up workbench
-    workbench.destroy_self()
+    pp.pprint(clean)
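
With that fix, the module doubles as a quick EXIF inspector when run from a
checkout, e.g.:

    python mediagoblin/media_types/image/processing.py some_photo.jpg
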