from mediagoblin import mg_globals
from mediagoblin.init import setup_storage, setup_global_and_app_config
from mediagoblin.init.config import read_mediagoblin_config
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.storage import BasicFileStorage
import shlex
+import shutil
import tarfile
import subprocess
import os.path
import os
import re
+import sys
+
def import_export_parse_setup(subparser):
    # TODO: Add default
    subparser.add_argument(
        '--cache_path', default='/tmp/mediagoblin/',
        help='Temporary directory the export/import cache is built in')
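+    # The functions below also read args.tar_file, args.mongodump_path and
+    # args.mongorestore_path; a minimal sketch of those arguments, with
+    # defaults assumed here (args.conf_file presumably comes from the
+    # parent gmg parser):
+    subparser.add_argument(
+        'tar_file')
+    subparser.add_argument(
+        '--mongodump_path', default='mongodump',
+        help='mongodump binary')
+    subparser.add_argument(
+        '--mongorestore_path', default='mongorestore',
+        help='mongorestore binary')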
+
def _export_database(db, args):
print "\n== Exporting database ==\n"
-
+
command = '{mongodump_path} -d {database} -o {mongodump_cache}'.format(
mongodump_path=args.mongodump_path,
database=db.name,
mongodump_cache=args._cache_path['database'])
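+    # mongodump -o creates one <database name>/ subdirectory of .bson
+    # dumps under the cache path; _import_database() restores from there.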
-
+
p = subprocess.Popen(
shlex.split(command))
-
+
p.wait()
print "\n== Database exported ==\n"
+
def _export_media(db, args):
-
print "\n== Exporting media ==\n"
-
+
media_cache = BasicFileStorage(
args._cache_path['media'])
+ # TODO: Add export of queue files
+ queue_cache = BasicFileStorage(
+ args._cache_path['queue'])
+
for entry in db.media_entries.find():
for name, path in entry['media_files'].items():
            mc_file = media_cache.get_file(path, mode='wb')
+            mc_file.write(
+                mg_globals.public_store.get_file(path, mode='rb').read())
+
            print(mc_file)
            print(entry)
+ print "\n== Media exported ==\n"
+
+
+def _import_media(db, args):
+ """
+ Import media files
+
+ Must be called after _import_database()
+ """
+ print "\n== Importing media ==\n"
+
+ media_cache = BasicFileStorage(
+ args._cache_path['media'])
+
+ # TODO: Add import of queue files
queue_cache = BasicFileStorage(
args._cache_path['queue'])
- qc_file = queue_cache.get_file(entry['queued_media_file'], mode='wb')
- qc_file.write(
- mg_globals.queue_store.get_file(entry['queued_media_file'], mode='rb').read())
- print(qc_file)
+ for entry in db.media_entries.find():
+ for name, path in entry['media_files'].items():
+ media_file = mg_globals.public_store.get_file(path, mode='wb')
+ media_file.write(
+ media_cache.get_file(path, mode='rb').read())
+
+ print(media_file)
+ print(entry)
+
+ print "\n== Media imported ==\n"
- print "\n== Media exported ==\n"
def _import_database(db, args):
- command = '{mongorestore_path} -d {database} -o {mongodump_cache}'.format(
+ print "\n== Importing database ==\n"
+    command = '{mongorestore_path} -d {database} {backup_dir}/{database}'.format(
mongorestore_path=args.mongorestore_path,
database=db.name,
- mongodump_cache=args.mongodump_cache)
+ backup_dir=args._cache_path['database'])
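+    # mongorestore is pointed at <cache_path>/database/<dbname>, the
+    # directory layout mongodump produced in _export_database().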
+
+ print command
+
+ p = subprocess.Popen(
+ shlex.split(command))
+
+ p.wait()
+
def env_import(args):
+    args.cache_path = os.path.join(args.cache_path, 'mediagoblin-data')
+ args = _setup_paths(args)
+
+ setup_global_and_app_config(args.conf_file)
+ setup_storage()
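+    # setup_storage() fills in mg_globals.public_store (and queue_store),
+    # which _import_media() writes the cached files back into.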
+
config, validation_result = read_mediagoblin_config(args.conf_file)
connection, db = setup_connection_and_db_from_config(
config['mediagoblin'], use_pymongo=True)
    tf = tarfile.open(
        args.tar_file,
mode='r|gz')
- tf.extractall(args.extract_path)
+ tf.extractall(args.cache_path)
+
+ # Import database from extracted data
+ _import_database(db, args)
+
+ _import_media(db, args)
+
def _setup_paths(args):
args._cache_path = dict()
PATH_MAP = {
'media': 'media',
- 'queue': 'queue',
+ 'queue': 'queue',
'database': 'database'}
-
+
for key, val in PATH_MAP.items():
args._cache_path[key] = os.path.join(args.cache_path, val)
return args
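+# With the default --cache_path this lays out:
+#     /tmp/mediagoblin/media      <- copies of the public store
+#     /tmp/mediagoblin/queue      <- copies of the queue store
+#     /tmp/mediagoblin/database   <- mongodump output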
+
def _create_archive(args):
print "\n== Compressing to archive ==\n"
+
tf = tarfile.open(
args.tar_file,
mode='w|gz')
-    with tf:
-        for root, dirs, files in os.walk(args.cache_path):
-            print root, dirs, files
-            everything = []
-            everything.extend(dirs)
-            everything.extend(files)
-            print everything
-            for d in everything:
-                directory_path = os.path.join(root, d)
-                virtual_path = os.path.join(
-                    root.replace(args.cache_path, 'mediagoblin-data/'), d)
-                # print 'dir', directory_path, '\n', 'vir', virtual_path
-                tarinfo = tf.gettarinfo(
-                    directory_path,
-                    arcname=virtual_path)
-                tf.addfile(tarinfo)
-                print 'added ', d
-    '''
-    mg_data = tf.gettarinfo(
-        args.cache_path,
-        arcname='mediagoblin-data')
-
-    tf.addfile(mg_data)
-    '''
-    print "\n== Archiving done ==\n"
+    with tf:
+        tf.add(args.cache_path, 'mediagoblin-data/')
+
+    print "\n== Archiving done ==\n"
+
+
+def _clean(args):
+    shutil.rmtree(args.cache_path)
+
+
+def _check(args):
+    if os.path.exists(args.tar_file):
+        overwrite = raw_input(
+            'The output file already exists. '
+            'Are you **SURE** you want to overwrite it? '
+            '(yes/no)> ')
+
+        if not overwrite == 'yes':
+            print "Aborting."
+            return False
+
+    if os.path.exists(args.cache_path):
+        print 'The cache directory must not exist before you run this script'
+        print 'Cache directory: ', args.cache_path
+        return False
+
+    return True
def env_export(args):
args = _setup_paths(args)
+ if not _check(args):
+ print "\n== Checks did not pass, exiting ==\n"
+ sys.exit(0)
+
setup_global_and_app_config(args.conf_file)
setup_storage()
    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)
- if os.path.exists(args.tar_file):
- overwrite = raw_input(
- 'The output file already exists. '
- 'Are you **SURE** you want to overwrite it? '
- '(yes/no)> ')
- if not overwrite == 'yes':
- print "Aborting."
- return
-
_export_database(db, args)
_export_media(db, args)
_create_archive(args)
+
+ _clean(args)
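+
+# Typical use, assuming these subcommands are registered with the gmg
+# command-line tool (the wiring lives outside this module):
+#
+#     gmg env_export mediagoblin-data.tar.gz
+#     gmg env_import mediagoblin-data.tar.gz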