- Added progress meter for video and audio media types.
- Changed MediaEntry.__repr__ to display a more useful description.
- Added a new MediaEntry.state, 'processing', which means the task is
  currently running the processor on the item.
- Fixed some PEP8 issues in user_pages/views.py
- Fixed the Atom feed tag URI to use the correct year.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sqlalchemy import MetaData, Table, Column, Boolean
+from sqlalchemy import MetaData, Table, Column, Boolean, SmallInteger
from mediagoblin.db.sql.util import RegisterMigration
default=True, nullable=True)
col.create(users, populate_defaults=True)
db_conn.commit()
+
+
+@RegisterMigration(3, MIGRATIONS)
+def add_transcoding_progress(db_conn):
+ metadata = MetaData(bind=db_conn.bind)
+
+ media_entry = Table('core__media_entries', metadata, autoload=True,
+ autoload_with=db_conn.bind)
+
+ col = Column('transcoding_progress', SmallInteger)
+ col.create(media_entry)
+ db_conn.commit()
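For context, a minimal sketch of how migrations registered with @RegisterMigration might be applied, assuming MIGRATIONS maps migration numbers to callables; run_pending_migrations is hypothetical, and the decorator's actual bookkeeping is not shown in this diff:

    def run_pending_migrations(db_conn, current_version, migrations):
        # Apply every migration newer than the database's current
        # version, lowest-numbered first.
        for number in sorted(migrations):
            if number > current_version:
                migrations[number](db_conn)
                current_version = number
        return current_version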
fail_error = Column(Unicode)
fail_metadata = Column(JSONEncoded)
+ transcoding_progress = Column(SmallInteger)
+
queued_media_file = Column(PathTupleWithSlashes)
queued_task_id = Column(Unicode)
__import__(models_module)
return sys.modules[models_module].DATA_MODEL
+ def __repr__(self):
+ return '<{classname} {id}: {title}>'.format(
+ classname=self.__class__.__name__,
+ id=self.id,
+ title=self.title)
+
class FileKeynames(Base):
"""
from mediagoblin import mg_globals as mgg
from mediagoblin.processing import (create_pub_filepath, BadMediaFail,
- FilenameBuilder)
+ FilenameBuilder, ProgressCallback)
from mediagoblin.media_types.audio.transcoders import (AudioTranscoder,
AudioThumbnailer)
_log = logging.getLogger(__name__)
+
def sniff_handler(media_file, **kw):
try:
transcoder = AudioTranscoder()
return False
+
def process_audio(entry):
audio_config = mgg.global_config['media_type:mediagoblin.media_types.audio']
transcoder = AudioTranscoder()
with tempfile.NamedTemporaryFile() as webm_audio_tmp:
+ progress_callback = ProgressCallback(entry)
transcoder.transcode(
queued_filename,
webm_audio_tmp.name,
- quality=audio_config['quality'])
+ quality=audio_config['quality'],
+ progress_callback=progress_callback)
transcoder.discover(webm_audio_tmp.name)
data = dict(message.structure)
if self.__on_progress:
- self.__on_progress(data)
+ self.__on_progress(data.get('percent'))
_log.info('{0}% done...'.format(
data.get('percent')))
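Note the changed callback contract here (the video transcoder below makes the same change): on-progress callbacks now receive the bare percentage, which may be None, instead of the whole message structure. A custom callback would look something like this sketch (print_progress is hypothetical):

    def print_progress(percent):
        # percent may be None when a progress message has no 'percent' field
        if percent is not None:
            print('{0}% done'.format(percent))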
from mediagoblin import mg_globals as mgg
from mediagoblin.processing import \
- create_pub_filepath, FilenameBuilder, BaseProcessingFail
+ create_pub_filepath, FilenameBuilder, BaseProcessingFail, ProgressCallback
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
from . import transcoders
with tmp_dst:
# Transcode queued file to a VP8/vorbis file that fits in a 640x640 square
+ progress_callback = ProgressCallback(entry)
transcoder = transcoders.VideoTranscoder()
transcoder.transcode(queued_filename, tmp_dst.name,
vp8_quality=video_config['vp8_quality'],
vp8_threads=video_config['vp8_threads'],
- vorbis_quality=video_config['vorbis_quality'])
+ vorbis_quality=video_config['vorbis_quality'],
+ progress_callback=progress_callback)
# Push transcoded video to public storage
_log.debug('Saving medium...')
data = dict(message.structure)
if self._progress_callback:
- self._progress_callback(data)
+ self._progress_callback(data.get('percent'))
_log.info('{percent}% done...'.format(
percent=data.get('percent')))
_log = logging.getLogger(__name__)
+class ProgressCallback(object):
+    """Persist reported transcoding progress on a MediaEntry."""
+    def __init__(self, entry):
+        self.entry = entry
+
+    def __call__(self, progress):
+        # progress may be None when no 'percent' was reported;
+        # skip saving in that case
+        if progress:
+            self.entry.transcoding_progress = progress
+            self.entry.save()
+
+
def create_pub_filepath(entry, filename):
return mgg.public_store.get_unique_filepath(
['media_entries',
unicode(entry._id),
filename])
+
class FilenameBuilder(object):
"""Easily slice and dice filenames.
entry = mgg.database.MediaEntry.one(
{'_id': ObjectId(media_id)})
- _log.info('Running task {0} on media {1}: {2}'.format(
- self.name,
- entry._id,
- entry.title))
-
# Try to process, and handle expected errors.
try:
- #__import__(entry.media_type)
manager = get_media_manager(entry.media_type)
+
+ entry.state = u'processing'
+ entry.save()
+
_log.debug('Processing {0}'.format(entry))
+
manager['processor'](entry)
+
+ entry.state = u'processed'
+ entry.save()
+
except BaseProcessingFail as exc:
mark_entry_failed(entry._id, exc)
return
+
except ImportError as exc:
_log.error(
'Entry {0} failed to process due to an import error: {1}'\
mark_entry_failed(entry._id, exc)
- entry.state = u'processed'
- entry.save()
-
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""
If the processing failed we should mark that in the database.
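The task now moves an entry through an explicit lifecycle; u'unprocessed' is the state a queued entry starts in, as the old panel query below shows:

    # Lifecycle of MediaEntry.state after this change:
    #
    #   u'unprocessed' --> u'processing' --> u'processed'
    #                                   \--> u'failed'   (via mark_entry_failed)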
<th>ID</th>
<th>Title</th>
<th>When submitted</th>
- <th>Status</th>
+ <th>Transcoding progress</th>
</tr>
{% for media_entry in processing_entries %}
<tr>
<td>{{ media_entry._id }}</td>
<td>{{ media_entry.title }}</td>
<td>{{ media_entry.created.strftime("%m-%d-%Y %I:%M %p") }}</td>
- <td></td>
+ {% if media_entry.transcoding_progress %}
+ <td>{{ media_entry.transcoding_progress }}%</td>
+ {% else %}
+        <td>{% trans %}Unknown{% endtrans %}</td>
+ {% endif %}
</tr>
{% endfor %}
</table>
{% else %}
- <p><i>{% trans %}No media in-processing{% endtrans %}</i></p>
+ <p><em>{% trans %}No media in-processing{% endtrans %}</em></p>
{% endif %}
{% if failed_entries.count() %}
</tr>
{% endfor %}
</table>
-{% endif %}
+{% else %}
+ <p><em>{% trans %}No failed entries!{% endtrans %}</em></p>
+{% endif %}
+{% if processed_entries.count() %}
+ <h2>{% trans %}Your last 10 successful uploads{% endtrans %}</h2>
+
+ <table class="media_panel processed">
+ <tr>
+ <th>ID</th>
+ <th>Title</th>
+      <th>When submitted</th>
+ </tr>
+ {% for entry in processed_entries %}
+ <tr>
+ <td>{{ entry._id }}</td>
+ <td><a href="{{ entry.url_for_self(request.urlgen) }}">{{ entry.title }}</a></td>
+ <td>{{ entry.created.strftime("%m-%d-%Y %I:%M %p") }}</td>
+ </tr>
+ {% endfor %}
+ </table>
+{% else %}
+ <p><em>{% trans %}No processed entries, yet!{% endtrans %}</em></p>
+{% endif %}
{% endblock %}
from webob import exc
import logging
+import datetime
from mediagoblin import messages, mg_globals
from mediagoblin.db.util import DESCENDING, ObjectId
_log = logging.getLogger(__name__)
_log.setLevel(logging.DEBUG)
+
@uses_pagination
def user_home(request, page):
"""'Homepage' of a User()"""
atomlinks = [{
'href': request.urlgen(
'mediagoblin.user_pages.user_home',
- qualified=True,user=request.matchdict['user']),
+ qualified=True, user=request.matchdict['user']),
'rel': 'alternate',
'type': 'text/html'
- }];
+ }]
+
if mg_globals.app_config["push_urls"]:
for push_url in mg_globals.app_config["push_urls"]:
atomlinks.append({
feed = AtomFeed(
"MediaGoblin: Feed for user '%s'" % request.matchdict['user'],
feed_url=request.url,
- id='tag:'+request.host+',2011:gallery.user-'+request.matchdict['user'],
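+            # e.g. 'tag:example.org,2012:gallery.user-alice'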
+ id='tag:{host},{year}:gallery.user-{user}'.format(
+ host=request.host,
+ year=datetime.datetime.today().strftime('%Y'),
+ user=request.matchdict['user']),
links=atomlinks)
-
for entry in cursor:
feed.add(entry.get('title'),
entry.description_html,
- id=entry.url_for_self(request.urlgen,qualified=True),
+ id=entry.url_for_self(request.urlgen, qualified=True),
content_type='html',
author={
'name': entry.get_uploader.username,
# Get media entries which are in-processing
processing_entries = request.db.MediaEntry.find(
{'uploader': user._id,
- 'state': u'unprocessed'}).sort('created', DESCENDING)
+ 'state': u'processing'}).sort('created', DESCENDING)
# Get media entries which have failed to process
failed_entries = request.db.MediaEntry.find(
{'uploader': user._id,
'state': u'failed'}).sort('created', DESCENDING)
+ processed_entries = request.db.MediaEntry.find(
+ {'uploader': user._id,
+ 'state': u'processed'}).sort('created', DESCENDING).limit(10)
+
# Render to response
return render_to_response(
request,
'mediagoblin/user_pages/processing_panel.html',
{'user': user,
'processing_entries': processing_entries,
- 'failed_entries': failed_entries})
+ 'failed_entries': failed_entries,
+ 'processed_entries': processed_entries})