X-Git-Url: https://vcs.fsf.org/?a=blobdiff_plain;f=mediagoblin%2Fgmg_commands%2Fimport_export.py;h=4ec17d4783d2df046ee785398d635605595f83c3;hb=7d98005a6b2469134adcf84b7a7417a24968bd8d;hp=f6651327a9e6c2c687cc02a42d20813b391d4ade;hpb=8f12c9b24cb6fef5d7cc332e5e5e8d5587ba38e0;p=mediagoblin.git

diff --git a/mediagoblin/gmg_commands/import_export.py b/mediagoblin/gmg_commands/import_export.py
index f6651327..4ec17d47 100644
--- a/mediagoblin/gmg_commands/import_export.py
+++ b/mediagoblin/gmg_commands/import_export.py
@@ -1,5 +1,5 @@
 # GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 Free Software Foundation, Inc
+# Copyright (C) 2011 MediaGoblin contributors.  See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -14,32 +14,30 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from mediagoblin.gmg_commands import util as commands_util
 from mediagoblin import mg_globals
-from mediagoblin.db import util as db_util
 from mediagoblin.db.open import setup_connection_and_db_from_config
-from mediagoblin.init.config import read_mediagoblin_config
-from mediagoblin import util as mg_util
-from mediagoblin.storage import BasicFileStorage
+from mediagoblin.storage.filestorage import BasicFileStorage
 from mediagoblin.init import setup_storage, setup_global_and_app_config
 
-import shlex
 import shutil
 import tarfile
+import tempfile
 import subprocess
 import os.path
 import os
-import re
 import sys
+import logging
+from contextlib import closing
+
+_log = logging.getLogger('gmg.import_export')
+logging.basicConfig()
+_log.setLevel(logging.INFO)
 
 
 def import_export_parse_setup(subparser):
     # TODO: Add default
     subparser.add_argument(
         'tar_file')
-    subparser.add_argument(
-        '-cf', '--conf_file', default='mediagoblin.ini',
-        help='Config file used to set up environment')
     subparser.add_argument(
         '--mongodump_path', default='mongodump',
         help='mongodump binary')
@@ -47,17 +45,17 @@ def import_export_parse_setup(subparser):
         '--mongorestore_path', default='mongorestore',
         help='mongorestore binary')
     subparser.add_argument(
-        '--cache_path', default='/tmp/mediagoblin/',
-        help='')
+        '--cache_path',
+        help='Temporary directory where files will be dumped')
 
 
 def _import_media(db, args):
-    """
+    '''
     Import media files
 
     Must be called after _import_database()
-    """
-    print "\n== Importing media ==\n"
+    '''
+    _log.info('-> Importing media...')
 
     media_cache = BasicFileStorage(
         args._cache_path['media'])
@@ -68,47 +66,48 @@ def _import_media(db, args):
 
     for entry in db.media_entries.find():
         for name, path in entry['media_files'].items():
+            _log.info('Importing: {0} - {1}'.format(
+                entry['title'],
+                name))
+
             media_file = mg_globals.public_store.get_file(path, mode='wb')
             media_file.write(
                 media_cache.get_file(path, mode='rb').read())
 
-            print(media_file)
-            print(entry)
-
-    print "\n== Media imported ==\n"
+    _log.info('...Media imported')
 
 
 def _import_database(db, args):
-    """
+    '''
     Restore mongo database from ___.bson files
-    """
-    print "\n== Importing database ==\n"
+    '''
+    _log.info('-> Importing database...')
 
     p = subprocess.Popen([
         args.mongorestore_path, '-d', db.name,
         os.path.join(args._cache_path['database'], db.name)])
-
-    print p
 
     p.wait()
 
-    print "\n== Database imported ==\n"
+    _log.info('...Database imported')
 
 
 def env_import(args):
-    """
+    '''
     Restore mongo database and media files from a tar archive
-    """
-    #
-    args.cache_path += 'mediagoblin-data'
+    '''
+    if not args.cache_path:
+        args.cache_path = tempfile.mkdtemp()
+
     setup_global_and_app_config(args.conf_file)
 
     # Creates mg_globals.public_store and mg_globals.queue_store
     setup_storage()
 
-    config, validation_result = read_mediagoblin_config(args.conf_file)
+    global_config, app_config = setup_global_and_app_config(args.conf_file)
     connection, db = setup_connection_and_db_from_config(
-        config['mediagoblin'], use_pymongo=True)
+        app_config, use_pymongo=True)
 
     tf = tarfile.open(
         args.tar_file,
@@ -116,7 +115,8 @@
 
     tf.extractall(args.cache_path)
 
-    args.cache_path += 'mediagoblin-data'
+    args.cache_path = os.path.join(
+        args.cache_path, 'mediagoblin-data')
     args = _setup_paths(args)
 
     # Import database from extracted data
     _import_database(db, args)
 
     _import_media(db, args)
 
-    # _clean(args)
+    _clean(args)
+
 
 def _setup_paths(args):
-    """
+    '''
     Populate ``args`` variable with cache subpaths
-    """
+    '''
     args._cache_path = dict()
     PATH_MAP = {
         'media': 'media',
@@ -143,58 +144,47 @@ def _setup_paths(args):
 
 
 def _create_archive(args):
-    """
+    '''
     Create the tar archive
-    """
-    print "\n== Compressing to archive ==\n"
+    '''
+    _log.info('-> Compressing to archive')
 
     tf = tarfile.open(
         args.tar_file,
         mode='w|gz')
 
-    with tf:
+    with closing(tf):
         tf.add(args.cache_path, 'mediagoblin-data/')
 
-    print "\n== Archiving done ==\n"
+    _log.info('...Archiving done')
 
 
 def _clean(args):
-    """
+    '''
     Remove cache directory
-    """
+    '''
     shutil.rmtree(args.cache_path)
 
 
 def _export_check(args):
-    """
+    '''
     Run security checks for export command
-    """
+    '''
     if os.path.exists(args.tar_file):
         overwrite = raw_input(
             'The output file already exists. '
             'Are you **SURE** you want to overwrite it? '
             '(yes/no)> ')
 
         if not overwrite == 'yes':
-            print "Aborting."
+            print 'Aborting.'
             return False
 
-    if os.path.exists(args.cache_path):
-        print 'The cache directory must not exist before you run this script'
-        print 'Cache directory: ', args.cache_path
-
-        return False
-
     return True
 
 
 def _export_database(db, args):
-    print "\n== Exporting database ==\n"
-
-    command = '{mongodump_path} -d {database} -o {mongodump_cache}'.format(
-        mongodump_path=args.mongodump_path,
-        database=db.name,
-        mongodump_cache=args._cache_path['database'])
+    _log.info('-> Exporting database...')
 
     p = subprocess.Popen([
         args.mongodump_path,
@@ -203,11 +193,11 @@ def _export_database(db, args):
 
     p.wait()
 
-    print "\n== Database exported ==\n"
+    _log.info('...Database exported')
 
 
 def _export_media(db, args):
-    print "\n== Exporting media ==\n"
+    _log.info('-> Exporting media...')
 
     media_cache = BasicFileStorage(
         args._cache_path['media'])
@@ -218,32 +208,45 @@ def _export_media(db, args):
 
     for entry in db.media_entries.find():
         for name, path in entry['media_files'].items():
-            mc_file = media_cache.get_file(path, mode='wb')
-            mc_file.write(
-                mg_globals.public_store.get_file(path, mode='rb').read())
+            _log.info(u'Exporting {0} - {1}'.format(
+                entry['title'],
+                name))
+            try:
+                mc_file = media_cache.get_file(path, mode='wb')
+                mc_file.write(
+                    mg_globals.public_store.get_file(path, mode='rb').read())
+            except Exception as e:
+                _log.error('Failed: {0}'.format(e))
 
-            print(mc_file)
-            print(entry)
-
-    print "\n== Media exported ==\n"
+    _log.info('...Media exported')
 
 
 def env_export(args):
-    """
+    '''
     Export database and media files to a tar archive
-    """
+    '''
+    if args.cache_path:
+        if os.path.exists(args.cache_path):
+            _log.error('The cache directory must not exist '
+                       'before you run this script')
+            _log.error('Cache directory: {0}'.format(args.cache_path))
+
+            return False
+    else:
+        args.cache_path = tempfile.mkdtemp()
+
     args = _setup_paths(args)
 
     if not _export_check(args):
-        print "\n== Checks did not pass, exiting ==\n"
+        _log.error('Checks did not pass, exiting')
         sys.exit(0)
 
-    setup_global_and_app_config(args.conf_file)
+    global_config, app_config = setup_global_and_app_config(args.conf_file)
+
     setup_storage()
 
-    config, validation_result = read_mediagoblin_config(args.conf_file)
     connection, db = setup_connection_and_db_from_config(
-        config['mediagoblin'], use_pymongo=True)
+        app_config, use_pymongo=True)
 
     _export_database(db, args)
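
Two implementation details in this patch are worth noting. tempfile.mkdtemp() replaces the old fixed '/tmp/mediagoblin/' default with a freshly created private scratch directory, and contextlib.closing() replaces the earlier "with tf:", since tarfile.TarFile only gained context-manager support in Python 2.7 and "with tf:" would fail on Python 2.6, which MediaGoblin supported at the time. A minimal standalone sketch of the combined pattern follows; the archive name and the dump step are hypothetical placeholders, not MediaGoblin's actual API:

    import shutil
    import tarfile
    import tempfile
    from contextlib import closing

    cache_path = tempfile.mkdtemp()  # fresh, private scratch directory
    try:
        # ... dump the database and media files into cache_path here ...

        # TarFile has no __enter__/__exit__ before Python 2.7, so wrap it in
        # closing() to guarantee the archive is closed even if an error occurs.
        with closing(tarfile.open('backup.tar.gz', mode='w|gz')) as tf:
            tf.add(cache_path, 'mediagoblin-data/')
    finally:
        shutil.rmtree(cache_path)  # same cleanup as _clean() above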