help='')
-def _export_database(db, args):
- print "\n== Exporting database ==\n"
-
- command = '{mongodump_path} -d {database} -o {mongodump_cache}'.format(
- mongodump_path=args.mongodump_path,
- database=db.name,
- mongodump_cache=args._cache_path['database'])
-
- p = subprocess.Popen(
- shlex.split(command))
-
- p.wait()
-
- print "\n== Database exported ==\n"
-
-
-def _export_media(db, args):
- print "\n== Exporting media ==\n"
-
- media_cache = BasicFileStorage(
- args._cache_path['media'])
-
- # TODO: Add export of queue files
- queue_cache = BasicFileStorage(
- args._cache_path['queue'])
-
- for entry in db.media_entries.find():
- for name, path in entry['media_files'].items():
- mc_file = media_cache.get_file(path, mode='wb')
- mc_file.write(
- mg_globals.public_store.get_file(path, mode='rb').read())
-
- print(mc_file)
- print(entry)
-
- print "\n== Media exported ==\n"
-
-
def _import_media(db, args):
"""
Import media files
def _import_database(db, args):
+ """
+ Restore mongo database from the mongodump directory of .bson files
+ """
print "\n== Importing database ==\n"
- command = '{mongorestore_path} -d {database}'
- '{backup_dir}/{database}'.format(
- mongorestore_path=args.mongorestore_path,
- database=db.name,
- backup_dir=args._cache_path['database'])
- print command
-
- p = subprocess.Popen(
- shlex.split(command))
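+ # mongorestore reads the per-database dump directory created by mongodump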
+ p = subprocess.Popen([
+ args.mongorestore_path,
+ '-d', db.name,
+ os.path.join(args._cache_path['database'], db.name)])
+
+ print p
p.wait()
+ print "\n== Database imported ==\n"
-def env_import(args):
- args.cache_path += 'mediagoblin-data'
- args = _setup_paths(args)
+def env_import(args):
+ """
+ Restore mongo database and media files from a tar archive
+ """
+ # args.cache_path += 'mediagoblin-data'
setup_global_and_app_config(args.conf_file)
+
+ # Creates mg_globals.public_store and mg_globals.queue_store
setup_storage()
config, validation_result = read_mediagoblin_config(args.conf_file)
tf.extractall(args.cache_path)
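+ # The archive unpacks into a mediagoblin-data/ directory, so extend
+ # cache_path only after extraction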
+ args.cache_path += 'mediagoblin-data'
+ args = _setup_paths(args)
+
# Import database from extracted data
_import_database(db, args)
_import_media(db, args)
+ # _clean(args)
def _setup_paths(args):
+ """
+ Populate ``args`` variable with cache subpaths
+ """
args._cache_path = dict()
PATH_MAP = {
'media': 'media',
def _create_archive(args):
+ """
+ Create the tar archive
+ """
print "\n== Compressing to archive ==\n"
tf = tarfile.open(
def _clean(args):
+ """
+ Remove cache directory
+ """
shutil.rmtree(args.cache_path)
-def _check(args):
+def _export_check(args):
+ """
+ Run security checks for export command
+ """
if os.path.exists(args.tar_file):
overwrite = raw_input(
'The output file already exists. '
return True
+def _export_database(db, args):
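+ """
+ Dump the mongo database to the cache directory with mongodump
+ """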
+ print "\n== Exporting database ==\n"
+
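+ # mongodump writes a <database>/ directory of .bson files into the cache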
+ p = subprocess.Popen([
+ args.mongodump_path,
+ '-d', db.name,
+ '-o', args._cache_path['database']])
+
+ p.wait()
+
+ print "\n== Database exported ==\n"
+
+
+def _export_media(db, args):
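+ """
+ Copy each media entry's files from the public store into the export cache
+ """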
+ print "\n== Exporting media ==\n"
+
+ media_cache = BasicFileStorage(
+ args._cache_path['media'])
+
+ # TODO: Add export of queue files
+ queue_cache = BasicFileStorage(
+ args._cache_path['queue'])
+
+ for entry in db.media_entries.find():
+ for name, path in entry['media_files'].items():
+ mc_file = media_cache.get_file(path, mode='wb')
+ mc_file.write(
+ mg_globals.public_store.get_file(path, mode='rb').read())
+
+ print(mc_file)
+ print(entry)
+
+ print "\n== Media exported ==\n"
+
+
def env_export(args):
+ """
+ Export database and media files to a tar archive
+ """
args = _setup_paths(args)
- if not _check(args):
+ if not _export_check(args):
print "\n== Checks did not pass, exiting ==\n"
sys.exit(0)