Start filling media_data.exif_all in addition to the existing media_data fields.
[mediagoblin.git] / mediagoblin / media_types / image / processing.py
index 5e8e4e0a75897d65123bb227aee26a8c0a7e9fb4..8a53a969971c325af13ef7eddd11f81c41b3f179 100644 (file)
@@ -1,5 +1,5 @@
 # GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 MediaGoblin contributors.  See AUTHORS.
+# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
 import Image
 import os
 
-from celery.task import Task
-from celery import registry
-
-from mediagoblin.db.util import ObjectId
 from mediagoblin import mg_globals as mgg
-
-from mediagoblin.processing import BaseProcessingFail, \
-    mark_entry_failed, BadMediaFail, create_pub_filepath, THUMB_SIZE, \
-    MEDIUM_SIZE
-
-################################
-# Media processing initial steps
-################################
-
-class ProcessMedia(Task):
-    """
-    Pass this entry off for processing.
-    """
-    def run(self, media_id):
-        """
-        Pass the media entry off to the appropriate processing function
-        (for now just process_image...)
-        """
-        entry = mgg.database.MediaEntry.one(
-            {'_id': ObjectId(media_id)})
-
-        # Try to process, and handle expected errors.
-        try:
-            process_image(entry)
-        except BaseProcessingFail, exc:
-            mark_entry_failed(entry[u'_id'], exc)
-            return
-
-        entry['state'] = u'processed'
-        entry.save()
-
-    def on_failure(self, exc, task_id, args, kwargs, einfo):
-        """
-        If the processing failed we should mark that in the database.
-
-        Assuming that the exception raised is a subclass of BaseProcessingFail,
-        we can use that to get more information about the failure and store that
-        for conveying information to users about the failure, etc.
-        """
-        entry_id = args[0]
-        mark_entry_failed(entry_id, exc)
-
-
-process_media = registry.tasks[ProcessMedia.name]
-
+from mediagoblin.processing import BadMediaFail, \
+    create_pub_filepath, THUMB_SIZE, MEDIUM_SIZE
+from mediagoblin.tools.exif import exif_fix_image_orientation, \
+    extract_exif, clean_exif, get_gps_data, get_useful
 
 def process_image(entry):
     """
@@ -78,27 +33,38 @@ def process_image(entry):
         workbench.dir, 'conversions')
     os.mkdir(conversions_subdir)
 
-    queued_filepath = entry['queued_media_file']
+    queued_filepath = entry.queued_media_file
     queued_filename = workbench.localized_file(
         mgg.queue_store, queued_filepath,
         'source')
 
-    extension = os.path.splitext(queued_filename)[1]
+    filename_bits = os.path.splitext(queued_filename)
+    basename = os.path.split(filename_bits[0])[1]
+    extension = filename_bits[1].lower()
+
+    # EXIF extraction
+    exif_tags = extract_exif(queued_filename)
+    gps_data = get_gps_data(exif_tags)
 
     try:
         thumb = Image.open(queued_filename)
     except IOError:
         raise BadMediaFail()
 
+    thumb = exif_fix_image_orientation(thumb, exif_tags)
+
     thumb.thumbnail(THUMB_SIZE, Image.ANTIALIAS)
 
     # Copy the thumb to the conversion subdir, then remotely.
     thumb_filename = 'thumbnail' + extension
     thumb_filepath = create_pub_filepath(entry, thumb_filename)
+
     tmp_thumb_filename = os.path.join(
         conversions_subdir, thumb_filename)
+
     with file(tmp_thumb_filename, 'w') as thumb_file:
         thumb.save(thumb_file)
+
     mgg.public_store.copy_local_to_storage(
         tmp_thumb_filename, thumb_filepath)
 
@@ -106,42 +72,78 @@ def process_image(entry):
     # file, a `medium.jpg` files is created and later associated with the media
     # entry.
     medium = Image.open(queued_filename)
-    medium_processed = False
+
+    # Fix orientation
+    medium = exif_fix_image_orientation(medium, exif_tags)
 
     if medium.size[0] > MEDIUM_SIZE[0] or medium.size[1] > MEDIUM_SIZE[1]:
         medium.thumbnail(MEDIUM_SIZE, Image.ANTIALIAS)
 
-        medium_filename = 'medium' + extension
-        medium_filepath = create_pub_filepath(entry, medium_filename)
-        tmp_medium_filename = os.path.join(
-            conversions_subdir, medium_filename)
+    medium_filename = 'medium' + extension
+    medium_filepath = create_pub_filepath(entry, medium_filename)
 
-        with file(tmp_medium_filename, 'w') as medium_file:
-            medium.save(medium_file)
+    tmp_medium_filename = os.path.join(
+        conversions_subdir, medium_filename)
 
-        mgg.public_store.copy_local_to_storage(
-            tmp_medium_filename, medium_filepath)
+    with file(tmp_medium_filename, 'w') as medium_file:
+        medium.save(medium_file)
 
-        medium_processed = True
+    mgg.public_store.copy_local_to_storage(
+        tmp_medium_filename, medium_filepath)
 
     # we have to re-read because unlike PIL, not everything reads
     # things in string representation :)
     queued_file = file(queued_filename, 'rb')
 
     with queued_file:
-        original_filepath = create_pub_filepath(entry, queued_filepath[-1])
+        #create_pub_filepath(entry, queued_filepath[-1])
+        original_filepath = create_pub_filepath(entry, basename + extension) 
 
         with mgg.public_store.get_file(original_filepath, 'wb') \
             as original_file:
             original_file.write(queued_file.read())
 
+    # Remove queued media file from storage and database
     mgg.queue_store.delete_file(queued_filepath)
-    entry['queued_media_file'] = []
+    entry.queued_media_file = []
+
+    # Insert media file information into database
     media_files_dict = entry.setdefault('media_files', {})
     media_files_dict['thumb'] = thumb_filepath
     media_files_dict['original'] = original_filepath
-    if medium_processed:
-        media_files_dict['medium'] = medium_filepath
+    media_files_dict['medium'] = medium_filepath
+
+    # Insert exif data into database
+    exif_all = clean_exif(exif_tags)
+    media_data = entry.setdefault('media_data', {})
+
+    # TODO: Fix for sql media_data, when exif is in sql
+    if media_data is not None:
+        media_data['exif'] = {
+            'clean': exif_all}
+        media_data['exif']['useful'] = get_useful(exif_all)
+
+    if len(exif_all):
+        entry.media_data_init(exif_all=exif_all)
+
+    if len(gps_data):
+        for key in list(gps_data.keys()):
+            gps_data['gps_' + key] = gps_data.pop(key)
+        entry.media_data_init(**gps_data)
 
     # clean up workbench
     workbench.destroy_self()
+
+if __name__ == '__main__':
+    import sys
+    import pprint
+
+    pp = pprint.PrettyPrinter()
+
+    result = extract_exif(sys.argv[1])
+    gps = get_gps_data(result)
+    clean = clean_exif(result)
+    useful = get_useful(clean)
+
+    print pp.pprint(
+        clean)