1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011 Free Software Foundation, Inc
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import shutil
import subprocess
import tarfile

from mediagoblin import mg_globals
from mediagoblin import util as mg_util
from mediagoblin.db import util as db_util
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.gmg_commands import util as commands_util
from mediagoblin.init import setup_storage, setup_global_and_app_config
from mediagoblin.init.config import read_mediagoblin_config
from mediagoblin.storage import BasicFileStorage
def import_export_parse_setup(subparser):
    '''
    Register the command-line arguments shared by the import and
    export commands.

    :param subparser: argparse (sub)parser the arguments are added to.
    '''
    # TODO: Add a sensible default for the archive location
    subparser.add_argument(
        'tar_file',
        help='Tar archive to import from / export to')
    subparser.add_argument(
        '-cf', '--conf_file', default='mediagoblin.ini',
        help='Config file used to set up environment')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path', default='/tmp/mediagoblin/',
        help='Temporary directory where files will be saved')
def _import_media(db, args):
    '''
    Import media files from the cache directory into the public store.

    Must be called after _import_database()

    :param db: pymongo database whose ``media_entries`` were already
        restored by _import_database()
    :param args: parsed arguments; ``args._cache_path`` must have been
        populated by _setup_paths()
    '''
    print("\n== Importing media ==\n")

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add import of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            # Copy each cached file into the live public store, making
            # sure both handles get closed (they were leaked before).
            source = media_cache.get_file(path, mode='rb')
            dest = mg_globals.public_store.get_file(path, mode='wb')
            try:
                dest.write(source.read())
            finally:
                source.close()
                dest.close()

    print("\n== Media imported ==\n")
81 def _import_database(db
, args
):
83 Restore mongo database from ___.bson files
85 print "\n== Importing database ==\n"
87 p
= subprocess
.Popen([
88 args
.mongorestore_path
,
90 os
.path
.join(args
._cache
_path
['database'], db
.name
)])
96 print "\n== Database imported ==\n"
def env_import(args):
    '''
    Restore mongo database and media files from a tar archive

    :param args: parsed arguments with ``tar_file``, ``conf_file`` and
        ``cache_path``
    '''
    # args.cache_path += 'mediagoblin-data'
    setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    # Unpack the archive into the cache directory before restoring.
    # NOTE(review): gzip stream mode assumed to match the export side —
    # the open call was garbled in this chunk; confirm.
    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')
    try:
        tf.extractall(args.cache_path)
    finally:
        tf.close()

    # The archive content lives under a 'mediagoblin-data' top directory,
    # so point the cache path below it and recompute the subpaths.
    args.cache_path += 'mediagoblin-data'
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    # Import the media files referenced by the restored entries
    _import_media(db, args)
129 def _setup_paths(args
):
131 Populate ``args`` variable with cache subpaths
133 args
._cache
_path
= dict()
137 'database': 'database'}
139 for key
, val
in PATH_MAP
.items():
140 args
._cache
_path
[key
] = os
.path
.join(args
.cache_path
, val
)
145 def _create_archive(args
):
147 Create the tar archive
149 print "\n== Compressing to archive ==\n"
156 tf
.add(args
.cache_path
, 'mediagoblin-data/')
158 print "\n== Archiving done ==\n"
163 Remove cache directory
165 shutil
.rmtree(args
.cache_path
)
168 def _export_check(args
):
170 Run security checks for export command
172 if os
.path
.exists(args
.tar_file
):
173 overwrite
= raw_input(
174 'The output file already exists. '
175 'Are you **SURE** you want to overwrite it? '
177 if not overwrite
== 'yes':
182 if os
.path
.exists(args
.cache_path
):
183 print 'The cache directory must not exist before you run this script'
184 print 'Cache directory: ', args
.cache_path
191 def _export_database(db
, args
):
192 print "\n== Exporting database ==\n"
194 command
= '{mongodump_path} -d {database} -o {mongodump_cache}'.format(
195 mongodump_path
=args
.mongodump_path
,
197 mongodump_cache
=args
._cache
_path
['database'])
199 p
= subprocess
.Popen([
202 '-o', args
._cache
_path
['database']])
206 print "\n== Database exported ==\n"
def _export_media(db, args):
    '''
    Copy every media file referenced in the database from the public
    store into the cache directory, ready for archiving.

    :param db: pymongo database with the ``media_entries`` collection
    :param args: parsed arguments with the ``_cache_path`` dict from
        _setup_paths()
    '''
    print("\n== Exporting media ==\n")

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add export of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            # Copy the stored file into the cache, making sure both
            # handles get closed (they were leaked before).
            source = mg_globals.public_store.get_file(path, mode='rb')
            mc_file = media_cache.get_file(path, mode='wb')
            try:
                mc_file.write(source.read())
            finally:
                source.close()
                mc_file.close()

    print("\n== Media exported ==\n")
def env_export(args):
    '''
    Export database and media files to a tar archive

    :param args: parsed arguments with ``tar_file``, ``conf_file`` and
        ``cache_path``
    '''
    args = _setup_paths(args)

    if not _export_check(args):
        print("\n== Checks did not pass, exiting ==\n")
        return

    setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store, which _export_media reads from.
    setup_storage()

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    # Dump the database into the cache directory
    _export_database(db, args)

    # Copy the referenced media files alongside the dump
    _export_media(db, args)

    # Bundle everything into the output archive
    _create_archive(args)