1 # GNU MediaGoblin -- federated, autonomous media hosting
2 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import shutil
import subprocess
import tarfile
import tempfile

from contextlib import closing

from mediagoblin import mg_globals
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.storage.filestorage import BasicFileStorage
from mediagoblin.init import setup_storage, setup_global_and_app_config
# Module-level logger for the import/export commands.  INFO level so the
# progress messages below are visible when running from the command line.
_log = logging.getLogger('gmg.import_export')
_log.setLevel(logging.INFO)
def import_export_parse_setup(subparser):
    '''
    Attach the import/export command-line arguments to ``subparser``.

    Adds the positional tar archive path plus options for the mongo
    dump/restore binaries and the temporary cache directory.
    '''
    # TODO: Add default
    subparser.add_argument(
        'tar_file')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path',
        help='Temporary directory where files will be temporarily dumped')
def _import_media(db, args):
    '''
    Copy media files from the extracted cache into the public store.

    Must be called after _import_database()
    '''
    _log.info('-> Importing media...')

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add import of queue files
    queue_cache = BasicFileStorage(args._cache_path['queue'])

    for entry in db.MediaEntry.find():
        for name, path in entry.media_files.items():
            # ascii-replace the title so logging never chokes on unicode
            _log.info('Importing: {0} - {1}'.format(
                entry.title.encode('ascii', 'replace'),
                name))

            # Read each file out of the cache and write it into the
            # configured public storage.
            media_file = mg_globals.public_store.get_file(path, mode='wb')
            media_file.write(
                media_cache.get_file(path, mode='rb').read())

    _log.info('...Media imported')
def _import_database(db, args):
    '''
    Restore mongo database from ___.bson files

    Runs the mongorestore binary against the dump directory created by
    the export command and blocks until it finishes.
    '''
    _log.info('-> Importing database...')

    p = subprocess.Popen([
        args.mongorestore_path,
        '-d', db.name,
        os.path.join(args._cache_path['database'], db.name)])

    # Wait for mongorestore to finish before media import proceeds.
    p.wait()

    _log.info('...Database imported')
def env_import(args):
    '''
    Restore mongo database and media files from a tar archive
    '''
    if not args.cache_path:
        args.cache_path = tempfile.mkdtemp()

    # Load configuration once (the original called
    # setup_global_and_app_config twice; one call suffices).
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    db = setup_connection_and_db_from_config(
        app_config)

    # Extract the archive into the cache directory.
    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')
    tf.extractall(args.cache_path)

    # The archive contains a single 'mediagoblin-data' top-level dir.
    args.cache_path = os.path.join(
        args.cache_path, 'mediagoblin-data')
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)

    _clean(args)
def _setup_paths(args):
    '''
    Populate ``args`` variable with cache subpaths

    Adds an ``args._cache_path`` dict mapping each data kind to its
    subdirectory under ``args.cache_path``.  Returns the mutated args.
    '''
    args._cache_path = dict()
    PATH_MAP = {
        'media': 'media',
        'queue': 'queue',
        'database': 'database'}

    for key, val in PATH_MAP.items():
        args._cache_path[key] = os.path.join(args.cache_path, val)

    return args
def _create_archive(args):
    '''
    Create the tar archive

    Writes a gzipped tarball of the cache directory to ``args.tar_file``
    under the top-level name 'mediagoblin-data/'.
    '''
    _log.info('-> Compressing to archive')

    tf = tarfile.open(
        args.tar_file,
        mode='w|gz')

    # closing() guarantees the archive is flushed even if add() raises.
    with closing(tf):
        tf.add(args.cache_path, 'mediagoblin-data/')

    _log.info('...Archiving done')
def _clean(args):
    '''
    Remove cache directory
    '''
    shutil.rmtree(args.cache_path)
def _export_check(args):
    '''
    Run security checks for export command

    Returns False (abort) if the target tar file exists and the user does
    not explicitly answer 'yes' to overwriting it; True otherwise.
    '''
    if os.path.exists(args.tar_file):
        # NOTE(review): raw_input is Python 2 only — this module predates
        # a Python 3 port.
        overwrite = raw_input(
            'The output file already exists. '
            'Are you **SURE** you want to overwrite it? '
            '[yes/no]> ')

        if not overwrite == 'yes':
            print('Aborting.')

            return False

    return True
def _export_database(db, args):
    '''
    Dump the mongo database into the cache directory via mongodump.
    '''
    _log.info('-> Exporting database...')

    p = subprocess.Popen([
        args.mongodump_path,
        '-d', db.name,
        '-o', args._cache_path['database']])

    # Block until the dump completes before archiving.
    p.wait()

    _log.info('...Database exported')
def _export_media(db, args):
    '''
    Copy all media files from the public store into the export cache.
    '''
    _log.info('-> Exporting media...')

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add export of queue files
    queue_cache = BasicFileStorage(args._cache_path['queue'])

    for entry in db.MediaEntry.find():
        for name, path in entry.media_files.items():
            _log.info(u'Exporting {0} - {1}'.format(
                entry.title,
                name))
            # Best-effort: a single missing/broken file should not abort
            # the whole export, so failures are logged and skipped.
            try:
                mc_file = media_cache.get_file(path, mode='wb')
                mc_file.write(
                    mg_globals.public_store.get_file(path, mode='rb').read())
            except Exception as e:
                _log.error('Failed: {0}'.format(e))

    _log.info('...Media exported')
def env_export(args):
    '''
    Export database and media files to a tar archive
    '''
    if args.cache_path:
        # A pre-existing cache dir would be deleted by _clean(); refuse.
        if os.path.exists(args.cache_path):
            _log.error('The cache directory must not exist '
                       'before you run this script')
            _log.error('Cache directory: {0}'.format(args.cache_path))

            return False
    else:
        args.cache_path = tempfile.mkdtemp()

    args = _setup_paths(args)

    if not _export_check(args):
        _log.error('Checks did not pass, exiting')
        return False

    # Fixed typo: was ``globa_config``.
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    db = setup_connection_and_db_from_config(app_config)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    _clean(args)