import logging
from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
from mediagoblin.processing import BadMediaFail, \
    create_pub_filepath, FilenameBuilder
from mediagoblin.tools.exif import exif_fix_image_orientation, \
    return False
-@get_workbench
-def process_image(entry, workbench=None):
+def process_image(entry):
"""Code to process an image. Will be run by celery.
A Workbench() represents a local tempory dir. It is automatically
cleaned up when this function exits.
"""
+    proc_state = entry.proc_state
+    workbench = proc_state.workbench
+
    # Conversions subdirectory to avoid collisions
    conversions_subdir = os.path.join(
        workbench.dir, 'conversions')
    os.mkdir(conversions_subdir)
-    queued_filepath = entry.queued_media_file
-    queued_filename = workbench.localized_file(
-        mgg.queue_store, queued_filepath,
-        'source')
+
+    queued_filename = proc_state.get_queued_filename()
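+    # name_builder keeps the original basename and extension around so
+    # later output filenames can be derived from them.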
    name_builder = FilenameBuilder(queued_filename)
    # EXIF extraction
    mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
    # Remove queued media file from storage and database
-    mgg.queue_store.delete_file(queued_filepath)
-    entry.queued_media_file = []
+    proc_state.delete_queue_file()
    # Insert media file information into database
    media_files_dict = entry.setdefault('media_files', {})
            ext=self.ext)
+class ProcessingState(object):
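+    """
+    Shared state for one entry's processing run: the entry itself, the
+    workbench it runs in, and a cached local copy of the queued original.
+    """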
+    def __init__(self, entry):
+        self.entry = entry
+        self.workbench = None
+        self.queued_filename = None
+
+        # Monkey patch us onto the entry
+        entry.proc_state = self
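+        # The per-media-type processor is still invoked with just the entry
+        # (see the task hunk below), so it finds this state via
+        # entry.proc_state.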
+
+    def set_workbench(self, wb):
+        self.workbench = wb
+
+    def get_queued_filename(self):
+        """
+        Get a filename for the original, on local storage
+        """
+        if self.queued_filename is not None:
+            return self.queued_filename
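+        # No cached copy yet: ask the workbench for a local path to the
+        # queued original (copying it out of the queue store if needed).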
+        queued_filepath = self.entry.queued_media_file
+        queued_filename = self.workbench.localized_file(
+            mgg.queue_store, queued_filepath,
+            'source')
+        self.queued_filename = queued_filename
+        return queued_filename
+
+    def delete_queue_file(self):
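+        # Drop the original from queue storage and clear the entry's
+        # reference to it.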
+        queued_filepath = self.entry.queued_media_file
+        mgg.queue_store.delete_file(queued_filepath)
+        self.entry.queued_media_file = []
+
+
def mark_entry_failed(entry_id, exc):
"""
Mark a media entry as having failed in its conversion.
from mediagoblin import mg_globals as mgg
from mediagoblin.db.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from . import mark_entry_failed, BaseProcessingFail, ProcessingState
from mediagoblin.tools.processing import json_processing_callback
_log = logging.getLogger(__name__)
            _log.debug('Processing {0}'.format(entry))
-            # run the processing code
-            entry.media_manager['processor'](entry)
+            proc_state = ProcessingState(entry)
+            with mgg.workbench_manager.create() as workbench:
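+                # The workbench (a temporary working dir) is handed to the
+                # processing state and cleaned up when this with-block exits.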
+                proc_state.set_workbench(workbench)
+                # run the processing code
+                entry.media_manager['processor'](entry)
            # We set the state to processed and save the entry here so there's
            # no need to save at the end of the processing stage, probably ;)