1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011 Free Software Foundation, Inc
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import shutil
import subprocess
import tarfile

from contextlib import closing

from mediagoblin import mg_globals
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.init.config import read_mediagoblin_config
from mediagoblin.init import setup_storage, setup_global_and_app_config
from mediagoblin.storage import BasicFileStorage
def import_export_parse_setup(subparser):
    """Register the command-line arguments shared by env_import/env_export.

    :param subparser: an argparse (sub)parser to populate in place.
    """
    # Positional: path of the tar archive to read from / write to.
    # TODO: Add default
    subparser.add_argument(
        'tar_file')
    subparser.add_argument(
        '-cf', '--conf_file', default='mediagoblin.ini',
        help='Config file used to set up environment')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path', default='/tmp/mediagoblin/',
        help='Temporary directory where files will be extracted')
def _import_media(db, args):
    """
    Import media files from the export cache into the public store.

    Must be called after _import_database(), since the media entries
    iterated here come from the restored database.

    :param db: open database handle with a ``media_entries`` collection.
    :param args: parsed arguments, already run through _setup_paths().
    """
    print("\n== Importing media ==\n")

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add import of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            # Copy each cached file into the live public storage.
            media_file = mg_globals.public_store.get_file(path, mode='wb')
            media_file.write(
                media_cache.get_file(path, mode='rb').read())

    print("\n== Media imported ==\n")
def _import_database(db, args):
    """
    Restore mongo database from the ___.bson dump files in the cache.

    :param db: open database handle (only ``db.name`` is used here).
    :param args: parsed arguments, already run through _setup_paths().
    """
    print("\n== Importing database ==\n")

    p = subprocess.Popen([
        args.mongorestore_path,
        '-d', db.name,
        os.path.join(args._cache_path['database'], db.name)])

    # Block until mongorestore finishes so later steps see restored data.
    p.wait()

    print("\n== Database imported ==\n")
def env_import(args):
    """
    Restore mongo database and media files from a tar archive.

    :param args: parsed command-line arguments (tar_file, conf_file,
        cache_path, mongorestore_path).
    """
    # args.cache_path += 'mediagoblin-data'
    setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')
    with closing(tf):
        tf.extractall(args.cache_path)

    # The archive is rooted at 'mediagoblin-data'; point the cache there.
    args.cache_path += 'mediagoblin-data'
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    # Import media from extracted data
    _import_media(db, args)
120 def _setup_paths(args
):
122 Populate ``args`` variable with cache subpaths
124 args
._cache
_path
= dict()
128 'database': 'database'}
130 for key
, val
in PATH_MAP
.items():
131 args
._cache
_path
[key
] = os
.path
.join(args
.cache_path
, val
)
136 def _create_archive(args
):
138 Create the tar archive
140 print "\n== Compressing to archive ==\n"
147 tf
.add(args
.cache_path
, 'mediagoblin-data/')
149 print "\n== Archiving done ==\n"
154 Remove cache directory
156 shutil
.rmtree(args
.cache_path
)
159 def _export_check(args
):
161 Run security checks for export command
163 if os
.path
.exists(args
.tar_file
):
164 overwrite
= raw_input(
165 'The output file already exists. '
166 'Are you **SURE** you want to overwrite it? '
168 if not overwrite
== 'yes':
173 if os
.path
.exists(args
.cache_path
):
174 print 'The cache directory must not exist before you run this script'
175 print 'Cache directory: ', args
.cache_path
def _export_database(db, args):
    """
    Dump the mongo database into bson files under the cache directory.

    :param db: open database handle (only ``db.name`` is used here).
    :param args: parsed arguments, already run through _setup_paths().
    """
    print("\n== Exporting database ==\n")

    p = subprocess.Popen([
        args.mongodump_path,
        '-d', db.name,
        '-o', args._cache_path['database']])

    # Block until mongodump finishes before archiving its output.
    p.wait()

    print("\n== Database exported ==\n")
def _export_media(db, args):
    """
    Copy media files from the public store into the export cache.

    :param db: open database handle with a ``media_entries`` collection.
    :param args: parsed arguments, already run through _setup_paths().
    """
    print("\n== Exporting media ==\n")

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add export of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            # Copy each public-store file into the cache for archiving.
            mc_file = media_cache.get_file(path, mode='wb')
            mc_file.write(
                mg_globals.public_store.get_file(path, mode='rb').read())

    print("\n== Media exported ==\n")
def env_export(args):
    """
    Export database and media files to a tar archive.

    :param args: parsed command-line arguments (tar_file, conf_file,
        cache_path, mongodump_path).
    """
    args = _setup_paths(args)

    if not _export_check(args):
        print("\n== Checks did not pass, exiting ==\n")
        # Abort: do not touch the database or the target archive.
        return

    setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    # Remove the temporary cache now that everything is in the archive.
    _clean(args)