1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011 Free Software Foundation, Inc
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import os.path
import shlex
import shutil
import subprocess
import tarfile

from mediagoblin import mg_globals
from mediagoblin import util as mg_util
from mediagoblin.db import util as db_util
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.gmg_commands import util as commands_util
from mediagoblin.init import setup_storage, setup_global_and_app_config
from mediagoblin.init.config import read_mediagoblin_config
from mediagoblin.storage import BasicFileStorage
def import_export_parse_setup(subparser):
    """
    Register the command-line arguments shared by env_import/env_export.

    :param subparser: an argparse (sub)parser to add the arguments to.
    """
    # TODO: Add default
    subparser.add_argument(
        'tar_file')
    subparser.add_argument(
        '-cf', '--conf_file', default='mediagoblin.ini',
        help='Config file used to set up environment')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path', default='/tmp/mediagoblin/',
        help='Temporary directory where files will be dumped')
54 def _export_database(db
, args
):
55 print "\n== Exporting database ==\n"
57 command
= '{mongodump_path} -d {database} -o {mongodump_cache}'.format(
58 mongodump_path
=args
.mongodump_path
,
60 mongodump_cache
=args
._cache
_path
['database'])
67 print "\n== Database exported ==\n"
70 def _export_media(db
, args
):
71 print "\n== Exporting media ==\n"
73 media_cache
= BasicFileStorage(
74 args
._cache
_path
['media'])
76 # TODO: Add export of queue files
77 queue_cache
= BasicFileStorage(
78 args
._cache
_path
['queue'])
80 for entry
in db
.media_entries
.find():
81 for name
, path
in entry
['media_files'].items():
82 mc_file
= media_cache
.get_file(path
, mode
='wb')
84 mg_globals
.public_store
.get_file(path
, mode
='rb').read())
89 print "\n== Media exported ==\n"
92 def _import_media(db
, args
):
96 Must be called after _import_database()
98 print "\n== Importing media ==\n"
100 media_cache
= BasicFileStorage(
101 args
._cache
_path
['media'])
103 # TODO: Add import of queue files
104 queue_cache
= BasicFileStorage(
105 args
._cache
_path
['queue'])
107 for entry
in db
.media_entries
.find():
108 for name
, path
in entry
['media_files'].items():
109 media_file
= mg_globals
.public_store
.get_file(path
, mode
='wb')
111 media_cache
.get_file(path
, mode
='rb').read())
116 print "\n== Media imported ==\n"
119 def _import_database(db
, args
):
120 print "\n== Importing database ==\n"
121 command
= '{mongorestore_path} -d {database}'
122 '{backup_dir}/{database}'.format(
123 mongorestore_path
=args
.mongorestore_path
,
125 backup_dir
=args
._cache
_path
['database'])
129 p
= subprocess
.Popen(
130 shlex
.split(command
))
def env_import(args):
    """
    Import database and media files from a tar archive into this instance.

    :param args: parsed arguments from import_export_parse_setup
    """
    # relies on the default cache_path ending with a path separator
    args.cache_path += 'mediagoblin-data'
    args = _setup_paths(args)

    # Creates mg_globals.app_config
    setup_global_and_app_config(args.conf_file)

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')

    tf.extractall(args.cache_path)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)
158 def _setup_paths(args
):
159 args
._cache
_path
= dict()
163 'database': 'database'}
165 for key
, val
in PATH_MAP
.items():
166 args
._cache
_path
[key
] = os
.path
.join(args
.cache_path
, val
)
171 def _create_archive(args
):
172 print "\n== Compressing to archive ==\n"
179 tf
.add(args
.cache_path
, 'mediagoblin-data/')
181 print "\n== Archiving done ==\n"
185 shutil
.rmtree(args
.cache_path
)
189 if os
.path
.exists(args
.tar_file
):
190 overwrite
= raw_input(
191 'The output file already exists. '
192 'Are you **SURE** you want to overwrite it? '
194 if not overwrite
== 'yes':
199 if os
.path
.exists(args
.cache_path
):
200 print 'The cache directory must not exist before you run this script'
201 print 'Cache directory: ', args
.cache_path
208 def env_export(args
):
209 args
= _setup_paths(args
)
212 print "\n== Checks did not pass, exiting ==\n"
215 setup_global_and_app_config(args
.conf_file
)
218 config
, validation_result
= read_mediagoblin_config(args
.conf_file
)
219 connection
, db
= setup_connection_and_db_from_config(
220 config
['mediagoblin'], use_pymongo
=True)
222 _export_database(db
, args
)
224 _export_media(db
, args
)
226 _create_archive(args
)