# mediagoblin/media_types/video/migrations.py
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import json

from sqlalchemy import MetaData, Column, Unicode

from mediagoblin.db.migration_tools import RegisterMigration, inspect_table

MIGRATIONS = {}

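# RegisterMigration (used as a decorator below) stores each migration function
# in MIGRATIONS keyed by its version number, so that any not-yet-applied
# versions can be run in order by the migration machinery.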
@RegisterMigration(1, MIGRATIONS)
def add_orig_metadata_column(db_conn):
    """Add the orig_metadata column to the video__mediadata table."""
    metadata = MetaData(bind=db_conn.bind)

    vid_data = inspect_table(metadata, "video__mediadata")

    # Column.create() comes from sqlalchemy-migrate's changeset extension;
    # it attaches the new nullable column to the existing table.
    col = Column('orig_metadata', Unicode,
                 default=None, nullable=True)
    col.create(vid_data)
    db_conn.commit()


@RegisterMigration(2, MIGRATIONS)
def webm_640_to_webm_video(db):
    """Rename the 'webm_640' file keyname to 'webm_video'."""
    metadata = MetaData(bind=db.bind)

    file_keynames = inspect_table(metadata, 'core__file_keynames')

    # Roughly: UPDATE core__file_keynames
    #          SET name = 'webm_video' WHERE name = 'webm_640'
    for row in db.execute(file_keynames.select()):
        if row.name == 'webm_640':
            db.execute(
                file_keynames.update()
                .where(file_keynames.c.id == row.id)
                .values(name='webm_video'))

    db.commit()


@RegisterMigration(3, MIGRATIONS)
def change_metadata_format(db):
    """Change the orig_metadata format to describe multi-stream audio/video."""
    db_metadata = MetaData(bind=db.bind)

    vid_data = inspect_table(db_metadata, "video__mediadata")

    for row in db.execute(vid_data.select()):
        if not row.orig_metadata:
            continue

        metadata = json.loads(row.orig_metadata)

        # Before this migration there was info about only one video and one
        # audio stream, so the existing info becomes the first (and only)
        # item in each stream list.
        new_metadata = {'audio': [], 'video': [], 'common': {}}
        video_key_map = {  # old: new
            'videoheight': 'height',
            'videowidth': 'width',
            'videorate': 'rate',
        }
        audio_key_map = {  # old: new
            'audiochannels': 'channels',
        }
        common_key_map = {
            'videolength': 'length',
        }

        new_metadata['video'] = [dict((v, metadata.get(k))
            for k, v in video_key_map.items() if metadata.get(k))]
        new_metadata['audio'] = [dict((v, metadata.get(k))
            for k, v in audio_key_map.items() if metadata.get(k))]
        new_metadata['common'] = dict((v, metadata.get(k))
            for k, v in common_key_map.items() if metadata.get(k))

        # 'mimetype' belongs in the common tags.
        new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}

        if 'tags' in metadata:
            new_metadata['video'][0]['tags'] = {}
            new_metadata['audio'][0]['tags'] = {}

            tags = metadata['tags']

            # Split the old flat tag dict between the video stream, the audio
            # stream and the common section.
            video_keys = ['encoder', 'encoder-version', 'video-codec']
            audio_keys = ['audio-codec']

            for t, v in tags.items():
                if t in video_keys:
                    new_metadata['video'][0]['tags'][t] = v
                elif t in audio_keys:
                    new_metadata['audio'][0]['tags'][t] = v
                else:
                    new_metadata['common']['tags'][t] = v

        db.execute(vid_data.update()
                   .where(vid_data.c.media_entry == row.media_entry)
                   .values(orig_metadata=json.dumps(new_metadata)))

    db.commit()