1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
|
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mediagoblin.db.migration_tools import RegisterMigration, inspect_table
from sqlalchemy import MetaData, Column, Unicode
import json
# Registry mapping migration version numbers to migration callables;
# populated by the @RegisterMigration decorators applied below.
MIGRATIONS = {}
@RegisterMigration(1, MIGRATIONS)
def add_orig_metadata_column(db_conn):
    """Migration 1: add a nullable ``orig_metadata`` column.

    Extends the ``video__mediadata`` table with a Unicode column
    (default ``None``) that stores the raw metadata extracted from
    an uploaded video, then commits the schema change.
    """
    meta = MetaData(bind=db_conn.bind)
    media_data_table = inspect_table(meta, "video__mediadata")
    orig_metadata_col = Column('orig_metadata', Unicode,
                               default=None, nullable=True)
    orig_metadata_col.create(media_data_table)
    db_conn.commit()
@RegisterMigration(2, MIGRATIONS)
def webm_640_to_webm_video(db):
    """Migration 2: rename the ``webm_640`` file keyname to ``webm_video``.

    Scans ``core__file_keynames`` and rewrites every row whose name is
    ``webm_640``, then commits.
    """
    meta = MetaData(bind=db.bind)
    keynames = inspect_table(meta, 'core__file_keynames')
    for keyname_row in db.execute(keynames.select()):
        # Only the old 640p keyname needs rewriting; skip everything else.
        if keyname_row.name != 'webm_640':
            continue
        rename_stmt = (keynames.update()
                       .where(keynames.c.id == keyname_row.id)
                       .values(name='webm_video'))
        db.execute(rename_stmt)
    db.commit()
@RegisterMigration(3, MIGRATIONS)
def change_metadata_format(db):
    """Change orig_metadata format for multi-stream a-v

    Migration 3: rewrites each row's ``orig_metadata`` JSON blob from
    the old flat, single-stream layout into the shape
    ``{'audio': [...], 'video': [...], 'common': {...}}`` so that
    multiple audio/video streams can be described per media entry.
    Rows with empty/NULL metadata are left untouched.
    """
    db_metadata = MetaData(bind=db.bind)
    vid_data = inspect_table(db_metadata, "video__mediadata")
    for row in db.execute(vid_data.select()):
        # Nothing stored for this entry -- nothing to convert.
        if not row.orig_metadata:
            continue
        metadata = json.loads(row.orig_metadata)
        # before this migration there was info about only one video or audio
        # stream. So, we store existing info as the first item in the list
        new_metadata = {'audio': [], 'video': [], 'common': {}}
        video_key_map = {  # old: new
            'videoheight': 'height',
            'videowidth': 'width',
            'videorate': 'rate',
            }
        audio_key_map = {  # old: new
            'audiochannels': 'channels',
            }
        common_key_map = {
            'videolength': 'length',
            }
        # Copy each present *truthy* old key under its new name; keys that
        # are absent or falsy (0/None/'') are dropped from the new format.
        new_metadata['video'] = [{v: metadata.get(k)
                                  for k, v in video_key_map.items() if metadata.get(k)}]
        new_metadata['audio'] = [{v: metadata.get(k)
                                  for k, v in audio_key_map.items() if metadata.get(k)}]
        new_metadata['common'] = {v: metadata.get(k)
                                  for k, v in common_key_map.items() if metadata.get(k)}
        # 'mimetype' should be in tags
        new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}
        if 'tags' in metadata:
            new_metadata['video'][0]['tags'] = {}
            new_metadata['audio'][0]['tags'] = {}
            tags = metadata['tags']
            video_keys = ['encoder', 'encoder-version', 'video-codec']
            audio_keys = ['audio-codec']
            # Route each old tag to the video stream, the audio stream, or
            # the common section, depending on which key list it matches.
            for t, v in tags.items():
                if t in video_keys:
                    new_metadata['video'][0]['tags'][t] = tags[t]
                elif t in audio_keys:
                    new_metadata['audio'][0]['tags'][t] = tags[t]
                else:
                    new_metadata['common']['tags'][t] = tags[t]
        # Write the converted blob back onto the same media entry.
        db.execute(vid_data.update()
                   .where(vid_data.c.media_entry == row.media_entry)
                   .values(orig_metadata=json.dumps(new_metadata)))
    db.commit()
|