[mediagoblin.git] / mediagoblin / gmg_commands / import_export.py
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from mediagoblin import mg_globals
from mediagoblin.db.open import setup_connection_and_db_from_config
from mediagoblin.init.config import read_mediagoblin_config
from mediagoblin.storage import BasicFileStorage
from mediagoblin.init import setup_storage, setup_global_and_app_config

import shutil
import tarfile
import tempfile
import subprocess
import os.path
import os
import sys
from contextlib import closing

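# Note: this module provides the environment export/import commands
# (``env_export`` and ``env_import`` below); the argparse subcommand wiring
# is expected to live in the surrounding mediagoblin.gmg_commands package.
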
def import_export_parse_setup(subparser):
    # TODO: Add default
    subparser.add_argument(
        'tar_file')
    subparser.add_argument(
        '-cf', '--conf_file', default='mediagoblin.ini',
        help='Config file used to set up environment')
    subparser.add_argument(
        '--mongodump_path', default='mongodump',
        help='mongodump binary')
    subparser.add_argument(
        '--mongorestore_path', default='mongorestore',
        help='mongorestore binary')
    subparser.add_argument(
        '--cache_path',
        help='Directory where files will be temporarily dumped')

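# Illustrative invocations (assuming the gmg CLI exposes these functions as
# the ``env_export`` / ``env_import`` subcommands; the archive name is just
# an example):
#
#   ./bin/gmg env_export -cf mediagoblin.ini mediagoblin-backup.tar.gz
#   ./bin/gmg env_import -cf mediagoblin.ini mediagoblin-backup.tar.gz
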
def _import_media(db, args):
    """
    Import media files

    Must be called after _import_database()
    """
    print "\n== Importing media ==\n"

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add import of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

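    # Copy every file referenced by each MediaEntry from the extracted
    # media cache back into the configured public store.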
    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            media_file = mg_globals.public_store.get_file(path, mode='wb')
            media_file.write(
                media_cache.get_file(path, mode='rb').read())

    print "\n== Media imported ==\n"

def _import_database(db, args):
    """
    Restore mongo database from ___.bson files
    """
    print "\n== Importing database ==\n"

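    # Shell out to mongorestore, pointing it at the per-database dump
    # directory that mongodump produced (<cache>/database/<db name>).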
    p = subprocess.Popen([
        args.mongorestore_path,
        '-d', db.name,
        os.path.join(args._cache_path['database'], db.name)])

    p.wait()

    print "\n== Database imported ==\n"

def env_import(args):
    """
    Restore mongo database and media files from a tar archive
    """
    if not args.cache_path:
        args.cache_path = tempfile.mkdtemp()

    setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    tf = tarfile.open(
        args.tar_file,
        mode='r|gz')

    tf.extractall(args.cache_path)

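    # _create_archive() stores everything under a single 'mediagoblin-data/'
    # prefix, so the real cache root is that subdirectory of the extraction.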
    args.cache_path = os.path.join(
        args.cache_path, 'mediagoblin-data')
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)

    _clean(args)

def _setup_paths(args):
    """
    Populate ``args`` variable with cache subpaths
    """
    args._cache_path = dict()
    PATH_MAP = {
        'media': 'media',
        'queue': 'queue',
        'database': 'database'}

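    # e.g. with a (hypothetical) cache_path of '/tmp/gmg-cache' this yields
    # args._cache_path == {'media': '/tmp/gmg-cache/media',
    #                      'queue': '/tmp/gmg-cache/queue',
    #                      'database': '/tmp/gmg-cache/database'}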
    for key, val in PATH_MAP.items():
        args._cache_path[key] = os.path.join(args.cache_path, val)

    return args

def _create_archive(args):
    """
    Create the tar archive
    """
    print "\n== Compressing to archive ==\n"

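    # mode='w|gz' writes a gzip-compressed tar stream; the whole cache
    # directory is stored under a single 'mediagoblin-data/' prefix.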
    tf = tarfile.open(
        args.tar_file,
        mode='w|gz')

    with closing(tf):
        tf.add(args.cache_path, 'mediagoblin-data/')

    print "\n== Archiving done ==\n"

def _clean(args):
    """
    Remove cache directory
    """
    shutil.rmtree(args.cache_path)

def _export_check(args):
    """
    Run security checks for export command
    """
    if os.path.exists(args.tar_file):
        overwrite = raw_input(
            'The output file already exists. '
            'Are you **SURE** you want to overwrite it? '
            '(yes/no)> ')
        if not overwrite == 'yes':
            print "Aborting."

            return False

    return True

def _export_database(db, args):
    print "\n== Exporting database ==\n"

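    # mongodump creates one subdirectory per database under the -o target,
    # i.e. <cache>/database/<db name>/, which env_import later feeds to
    # mongorestore.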
    p = subprocess.Popen([
        args.mongodump_path,
        '-d', db.name,
        '-o', args._cache_path['database']])

    p.wait()

    print "\n== Database exported ==\n"

def _export_media(db, args):
    print "\n== Exporting media ==\n"

    media_cache = BasicFileStorage(
        args._cache_path['media'])

    # TODO: Add export of queue files
    queue_cache = BasicFileStorage(
        args._cache_path['queue'])

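    # Copy every file referenced by each MediaEntry from the public store
    # into the media cache directory that will end up in the archive.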
    for entry in db.media_entries.find():
        for name, path in entry['media_files'].items():
            mc_file = media_cache.get_file(path, mode='wb')
            mc_file.write(
                mg_globals.public_store.get_file(path, mode='rb').read())

    print "\n== Media exported ==\n"

def env_export(args):
    """
    Export database and media files to a tar archive
    """
    if args.cache_path:
        if os.path.exists(args.cache_path):
            print 'The cache directory must not exist before you run this script'
            print 'Cache directory: ', args.cache_path

            return False
    else:
        args.cache_path = tempfile.mkdtemp()

    args = _setup_paths(args)

    if not _export_check(args):
        print "\n== Checks did not pass, exiting ==\n"
        sys.exit(0)

    setup_global_and_app_config(args.conf_file)
    setup_storage()

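    # Get a raw pymongo database handle: the export needs db.name for
    # mongodump and iterates the media_entries collection directly.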
    config, validation_result = read_mediagoblin_config(args.conf_file)
    connection, db = setup_connection_and_db_from_config(
        config['mediagoblin'], use_pymongo=True)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    _clean(args)