Make gmg's -cf option a global option
mediagoblin/gmg_commands/import_export.py
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

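'''Support code for the gmg environment export/import commands: env_export
dumps the MongoDB database and all media files into a gzipped tarball, and
env_import restores them from such an archive.'''
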
from mediagoblin import mg_globals
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.storage.filestorage import BasicFileStorage
from mediagoblin.init import setup_storage, setup_global_and_app_config

import shutil
import tarfile
import tempfile
import subprocess
import os.path
import os
import sys
import logging
from contextlib import closing

_log = logging.getLogger('gmg.import_export')
logging.basicConfig()
_log.setLevel(logging.INFO)


def import_export_parse_setup(subparser):
    # TODO: Add default
    subparser.add_argument(
        'tar_file')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path',
        help='Temporary directory where files will be dumped')


def _import_media(db, args):
    '''
    Import media files

    Must be called after _import_database()
    '''
    _log.info('-> Importing media...')

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add import of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

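    # Copy every file referenced by a media entry from the extracted
    # cache back into the configured public store.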
    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            _log.info('Importing: {0} - {1}'.format(
                entry['title'],
                name))

            media_file = mg_globals.public_store.get_file(path, mode='wb')
            media_file.write(
                media_cache.get_file(path, mode='rb').read())

    _log.info('...Media imported')


def _import_database(db, args):
    '''
    Restore the mongo database from the .bson dump files
    '''
    _log.info('-> Importing database...')

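    # mongorestore reads the per-collection .bson dumps for this database
    # name out of the extracted cache directory.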
    p = subprocess.Popen([
        args.mongorestore_path,
        '-d', db.name,
        os.path.join(args._cache_path['database'], db.name)])

    p.wait()

    _log.info('...Database imported')


def env_import(args):
    '''
    Restore mongo database and media files from a tar archive
    '''
    if not args.cache_path:
        args.cache_path = tempfile.mkdtemp()

    global_config, app_config = setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    connection, db = setup_connection_and_db_from_config(
        app_config, use_pymongo=True)

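    # Unpack the gzipped tarball into the cache directory.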
    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')

    tf.extractall(args.cache_path)

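    # The archive contents are rooted at 'mediagoblin-data/' (see
    # _create_archive), so point the cache path at that subdirectory.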
    args.cache_path = os.path.join(
        args.cache_path, 'mediagoblin-data')
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)

    _clean(args)


def _setup_paths(args):
    '''
    Populate ``args`` variable with cache subpaths
    '''
    args._cache_path = dict()
    PATH_MAP = {
        'media': 'media',
        'queue': 'queue',
        'database': 'database'}

    for key, val in PATH_MAP.items():
        args._cache_path[key] = os.path.join(args.cache_path, val)

    return args


def _create_archive(args):
    '''
    Create the tar archive
    '''
    _log.info('-> Compressing to archive')

    tf = tarfile.open(
        args.tar_file,
        mode='w|gz')

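    # Everything under cache_path is stored beneath the 'mediagoblin-data/'
    # prefix so that env_import knows where to find it.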
    with closing(tf):
        tf.add(args.cache_path, 'mediagoblin-data/')

    _log.info('...Archiving done')


def _clean(args):
    '''
    Remove cache directory
    '''
    shutil.rmtree(args.cache_path)


def _export_check(args):
    '''
    Run safety checks for the export command: refuse to silently
    overwrite an existing archive.
    '''
    if os.path.exists(args.tar_file):
        overwrite = raw_input(
            'The output file already exists. '
            'Are you **SURE** you want to overwrite it? '
            '(yes/no)> ')
        if overwrite != 'yes':
            print 'Aborting.'

            return False

    return True


def _export_database(db, args):
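    '''
    Dump the mongo database to .bson files with mongodump
    '''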
    _log.info('-> Exporting database...')

    p = subprocess.Popen([
        args.mongodump_path,
        '-d', db.name,
        '-o', args._cache_path['database']])

    p.wait()

    _log.info('...Database exported')


def _export_media(db, args):
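    '''
    Copy all media files referenced by media entries from the public
    store into the cache directory
    '''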
    _log.info('-> Exporting media...')

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add export of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

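    # Copy every file referenced by a media entry from the public store
    # into the cache, ready to be added to the archive.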
    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            _log.info('Exporting {0} - {1}'.format(
                entry['title'],
                name))

            mc_file = media_cache.get_file(path, mode='wb')
            mc_file.write(
                mg_globals.public_store.get_file(path, mode='rb').read())

    _log.info('...Media exported')


def env_export(args):
    '''
    Export database and media files to a tar archive
    '''
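    # A user-supplied cache directory must not already exist; otherwise a
    # fresh temporary directory is used.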
    if args.cache_path:
        if os.path.exists(args.cache_path):
            _log.error(
                'The cache directory must not exist before you run this script')
            _log.error('Cache directory: {0}'.format(args.cache_path))

            return False
    else:
        args.cache_path = tempfile.mkdtemp()

    args = _setup_paths(args)

    if not _export_check(args):
        _log.error('Checks did not pass, exiting')
        sys.exit(0)

    global_config, app_config = setup_global_and_app_config(args.conf_file)

    setup_storage()

    connection, db = setup_connection_and_db_from_config(
        app_config, use_pymongo=True)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    _clean(args)