import Image
import os
+import logging
from mediagoblin import mg_globals as mgg
+from mediagoblin.decorators import get_workbench
from mediagoblin.processing import BadMediaFail, \
- create_pub_filepath, THUMB_SIZE, MEDIUM_SIZE
+ create_pub_filepath, FilenameBuilder
from mediagoblin.tools.exif import exif_fix_image_orientation, \
- extract_exif, clean_exif, get_gps_data, get_useful
+ extract_exif, clean_exif, get_gps_data, get_useful, \
+ exif_image_needs_rotation
-MAX_FILENAME_LENGTH = 255 # the limit in VFAT -- seems like a good baseline
+_log = logging.getLogger(__name__)
-def resize_image(entry, filename, basename, file_tail, exif_tags, workdir,
- new_size, size_limits=(0, 0)):
- """Store a resized version of an image and return its pathname.
+
+def resize_image(entry, filename, new_path, exif_tags, workdir, new_size,
+ size_limits=(0, 0)):
+ """
+    Store a resized version of an image in the public storage.
Arguments:
entry -- the entry for the image to resize
filename -- the filename of the original image being resized
- basename -- simple basename of the given filename
- file_tail -- ending string and extension for the resized filename
+ new_path -- public file path for the new resized image
exif_tags -- EXIF data for the original image
workdir -- directory path for storing converted image files
new_size -- 2-tuple size for the resized image
- size_limits (optional) -- image is only resized if it exceeds this size
-
"""
try:
resized = Image.open(filename)
except IOError:
raise BadMediaFail()
resized = exif_fix_image_orientation(resized, exif_tags) # Fix orientation
-
- if ((resized.size[0] > size_limits[0]) or
- (resized.size[1] > size_limits[1])):
- resized.thumbnail(new_size, Image.ANTIALIAS)
-
- # Truncate basename as needed so len(basename + file_tail) <= 255
- resized_filename = (basename[:MAX_FILENAME_LENGTH - len(file_tail)] +
- file_tail)
- resized_filepath = create_pub_filepath(entry, resized_filename)
+ resized.thumbnail(new_size, Image.ANTIALIAS)
    # Copy the new file to the conversion subdir, then to remote storage.
- tmp_resized_filename = os.path.join(workdir, resized_filename)
+ tmp_resized_filename = os.path.join(workdir, new_path[-1])
with file(tmp_resized_filename, 'w') as resized_file:
resized.save(resized_file)
- mgg.public_store.copy_local_to_storage(
- tmp_resized_filename, resized_filepath)
- return resized_filepath
+ mgg.public_store.copy_local_to_storage(tmp_resized_filename, new_path)
-def process_image(entry):
- """
- Code to process an image
+
+SUPPORTED_FILETYPES = ['png', 'gif', 'jpg', 'jpeg']
+
+
+def sniff_handler(media_file, **kw):
+ if kw.get('media') is not None: # That's a double negative!
+ name, ext = os.path.splitext(kw['media'].filename)
+ clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase
+
+ if clean_ext in SUPPORTED_FILETYPES:
+ _log.info('Found file extension in supported filetypes')
+ return True
+ else:
+ _log.debug('Media present, extension not found in {0}'.format(
+ SUPPORTED_FILETYPES))
+ else:
+ _log.warning('Need additional information (keyword argument \'media\')'
+ ' to be able to handle sniffing')
+
+ return False
+
+
+@get_workbench
+def process_image(entry, workbench=None):
+ """Code to process an image. Will be run by celery.
+
+    A Workbench() represents a local temporary dir. It is automatically
+ cleaned up when this function exits.
"""
- workbench = mgg.workbench_manager.create_workbench()
# Conversions subdirectory to avoid collisions
conversions_subdir = os.path.join(
workbench.dir, 'conversions')
os.mkdir(conversions_subdir)
-
queued_filepath = entry.queued_media_file
queued_filename = workbench.localized_file(
mgg.queue_store, queued_filepath,
'source')
-
- filename_bits = os.path.splitext(queued_filename)
- basename = os.path.split(filename_bits[0])[1]
- extension = filename_bits[1].lower()
+ name_builder = FilenameBuilder(queued_filename)
# EXIF extraction
exif_tags = extract_exif(queued_filename)
gps_data = get_gps_data(exif_tags)
# Always create a small thumbnail
- thumb_filepath = resize_image(entry, queued_filename, basename,
- '.thumbnail' + extension, exif_tags,
- conversions_subdir, THUMB_SIZE)
+ thumb_filepath = create_pub_filepath(
+ entry, name_builder.fill('{basename}.thumbnail{ext}'))
+ resize_image(entry, queued_filename, thumb_filepath,
+ exif_tags, conversions_subdir,
+ (mgg.global_config['media:thumb']['max_width'],
+ mgg.global_config['media:thumb']['max_height']))
    # If the size of the original file exceeds the specified size of a `medium`
    # file, a `.medium{ext}` file is created and later associated with the
    # media entry.
- medium_filepath = resize_image(entry, queued_filename, basename,
- '.medium' + extension, exif_tags,
- conversions_subdir, MEDIUM_SIZE, MEDIUM_SIZE)
-
- # we have to re-read because unlike PIL, not everything reads
- # things in string representation :)
- queued_file = file(queued_filename, 'rb')
-
- with queued_file:
- #create_pub_filepath(entry, queued_filepath[-1])
- original_filepath = create_pub_filepath(entry, basename + extension)
-
- with mgg.public_store.get_file(original_filepath, 'wb') \
- as original_file:
- original_file.write(queued_file.read())
+ medium = Image.open(queued_filename)
+ if medium.size[0] > mgg.global_config['media:medium']['max_width'] \
+ or medium.size[1] > mgg.global_config['media:medium']['max_height'] \
+ or exif_image_needs_rotation(exif_tags):
+ medium_filepath = create_pub_filepath(
+ entry, name_builder.fill('{basename}.medium{ext}'))
+ resize_image(
+ entry, queued_filename, medium_filepath,
+ exif_tags, conversions_subdir,
+ (mgg.global_config['media:medium']['max_width'],
+ mgg.global_config['media:medium']['max_height']))
+ else:
+ medium_filepath = None
+
+ # Copy our queued local workbench to its final destination
+ original_filepath = create_pub_filepath(
+ entry, name_builder.fill('{basename}{ext}'))
+ mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
# Remove queued media file from storage and database
mgg.queue_store.delete_file(queued_filepath)
# Insert media file information into database
media_files_dict = entry.setdefault('media_files', {})
- media_files_dict['thumb'] = thumb_filepath
- media_files_dict['original'] = original_filepath
- media_files_dict['medium'] = medium_filepath
+ media_files_dict[u'thumb'] = thumb_filepath
+ media_files_dict[u'original'] = original_filepath
+ if medium_filepath:
+ media_files_dict[u'medium'] = medium_filepath
# Insert exif data into database
- media_data = entry.setdefault('media_data', {})
+ exif_all = clean_exif(exif_tags)
- # TODO: Fix for sql media_data, when exif is in sql
- if media_data is not None:
- media_data['exif'] = {
- 'clean': clean_exif(exif_tags)}
- media_data['exif']['useful'] = get_useful(
- media_data['exif']['clean'])
+ if len(exif_all):
+ entry.media_data_init(exif_all=exif_all)
if len(gps_data):
for key in list(gps_data.keys()):
gps_data['gps_' + key] = gps_data.pop(key)
entry.media_data_init(**gps_data)
- # clean up workbench
- workbench.destroy_self()
if __name__ == '__main__':
import sys