# Standard library
import logging
import mimetypes

# Third-party (python-cloudfiles)
import cloudfiles

# Module-level logger, per stdlib convention (one logger per module).
_log = logging.getLogger(__name__)
# NOTE(review): everything below is unified-diff residue, not runnable
# Python. '+'/'-' patch markers remain on several lines, indentation was
# stripped, and the enclosing method header (apparently an __init__ that
# sets self.param_* from config) was lost as diff context — TODO recover
# the original file before editing further.
class CloudFilesStorage(StorageInterface):
# Register webm explicitly; some platforms' mimetype DBs don't know it.
mimetypes.add_type("video/webm", "webm")
if not self.param_host:
- print('No CloudFiles host URL specified, '
+ _log.info('No CloudFiles host URL specified, '
'defaulting to Rackspace US')
# servicenet accepts either the string 'true' or boolean True from config.
self.connection = cloudfiles.get_connection(
servicenet=True if self.param_use_servicenet == 'true' or \
self.param_use_servicenet == True else False)
+ _log.debug('Connected to {0} (auth: {1})'.format(
+ self.connection.connection.host,
+ self.connection.auth.host))
+
# NOTE(review): the next five lines hold BOTH the old and new halves of a
# hunk whose markers were lost — create_container vs get_container cannot
# both be right; presumably create-if-missing then fetch. Verify against
# upstream history before trusting either line.
if not self.param_container == \
self.connection.get_container(self.param_container):
self.container = self.connection.create_container(
self.container = self.connection.get_container(
self.param_container)
+ _log.debug('Container: {0}'.format(
+ self.container.name))
+
# Cache the container's public URI for building file URLs later.
self.container_uri = self.container.public_uri()
def _resolve_filepath(self, filepath):
def file_exists(self, filepath):
    """
    Return True if *filepath* resolves to an existing object in the
    container, False otherwise.

    Uses EAFP: attempts the fetch and treats NoSuchObject as "absent".
    """
    try:
        self.container.get_object(self._resolve_filepath(filepath))
        return True
    except cloudfiles.errors.NoSuchObject:
        return False
def get_file(self, filepath, *args, **kwargs):
    """
    Fetch the object at *filepath*, creating it if it does not exist,
    and return it wrapped in a CloudFilesStorageObjectWrapper.

    - Doesn't care about the "mode" argument.
    """
    try:
        obj = self.container.get_object(
            self._resolve_filepath(filepath))
    except cloudfiles.errors.NoSuchObject:
        # Object is new — create it and set its mimetype ourselves,
        # since some extensions (webm) may not be universally accepted
        # as video/webm by the remote end.
        obj = self.container.create_object(
            self._resolve_filepath(filepath))

        # filepath is assumed to be a sequence of path components;
        # guess from the last one — TODO confirm against callers.
        mimetype = mimetypes.guess_type(filepath[-1])

        # guess_type returns a (type, encoding) tuple, which is ALWAYS
        # truthy even when the type is unknown — test the type itself
        # so we never set content_type to None.
        if mimetype[0]:
            obj.content_type = mimetype[0]
            obj.metadata = {'mime-type': mimetype[0]}

    return CloudFilesStorageObjectWrapper(obj, *args, **kwargs)
-
def file_url(self, filepath):
return '/'.join([
Wrapper for python-cloudfiles's cloudfiles.storage_object.Object
used to circumvent the mystic `medium.jpg` corruption issue, where
we had both python-cloudfiles and PIL doing buffering on both
ends and causing breakage.
This wrapper currently meets mediagoblin's needs for a public_store
file-like object.
self.storage_object = storage_object
def read(self, *args, **kwargs):
    """
    Read and return the wrapped storage object's contents, passing any
    arguments straight through to cloudfiles.
    """
    # Lazy %-style args so the message is only formatted when DEBUG is on.
    _log.debug('Reading %s', self.storage_object.name)
    return self.storage_object.read(*args, **kwargs)
def write(self, data, *args, **kwargs):
    """
    Write *data* to the wrapped storage object.

    If the object already has contents and *data* is a string, the
    existing contents are read back and *data* is appended to them
    before writing, since the remote object cannot be appended to
    in place.
    """
    # isinstance instead of type(...) == str: also accepts str subclasses
    # and is the idiomatic type check.
    if self.storage_object.size and isinstance(data, str):
        _log.debug('%s is > 0 in size, appending data',
                   self.storage_object.name)
        data = self.read() + data

    _log.debug('Writing %s', self.storage_object.name)
    self.storage_object.write(data, *args, **kwargs)
def close(self):
    """
    Intentionally a no-op: this wrapper holds nothing that needs
    releasing on close.
    """
    pass
def __enter__(self):