about summary refs log tree commit diffstats
path: root/mediagoblin/media_types
diff options
context:
space:
mode:
Diffstat (limited to 'mediagoblin/media_types')
-rw-r--r--mediagoblin/media_types/image/__init__.py47
-rw-r--r--mediagoblin/media_types/image/processing.py94
2 files changed, 105 insertions, 36 deletions
diff --git a/mediagoblin/media_types/image/__init__.py b/mediagoblin/media_types/image/__init__.py
index a1b43479..3a056718 100644
--- a/mediagoblin/media_types/image/__init__.py
+++ b/mediagoblin/media_types/image/__init__.py
@@ -15,13 +15,18 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import datetime
+import logging
+from mediagoblin.db.models import MediaEntry
from mediagoblin.media_types import MediaManagerBase
from mediagoblin.media_types.image.processing import process_image, \
sniff_handler
+from mediagoblin.submit.lib import run_process_media
from mediagoblin.tools import pluginapi
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
+_log = logging.getLogger(__name__)
+
ACCEPTED_EXTENSIONS = ["jpg", "jpeg", "png", "gif", "tiff"]
MEDIA_TYPE = 'mediagoblin.media_types.image'
@@ -69,7 +74,7 @@ def get_media_type_and_manager(ext):
def reprocess_action(args):
if args[0].state == 'processed':
print _('\n Available reprocessing actions for processed images:'
- '\n \t --resize: thumbnail or medium'
+ '\n \t --resize: thumb or medium'
'\n Options:'
'\n \t --size: max_width max_height (defaults to config specs)')
return True
@@ -78,8 +83,7 @@ def reprocess_action(args):
def _parser(args):
parser = argparse.ArgumentParser()
parser.add_argument(
- '--resize',
- action='store_true')
+ '--resize')
parser.add_argument(
'--size',
nargs=2)
@@ -100,6 +104,9 @@ def _check_eligible(entry_args, reprocess_args):
if reprocess_args.resize:
raise Exception(_('You can not run --resize on media that has not'
' been processed.'))
+ if reprocess_args.size:
+ _log.warn('With --initial_processing, the --size flag will be'
+ ' ignored.')
if entry_args.state == 'processing':
raise Exception(_('We currently do not support reprocessing on media'
@@ -111,8 +118,38 @@ def media_reprocess(args):
entry_args = args[0]
_check_eligible(entry_args, reprocess_args)
- import ipdb
- ipdb.set_trace()
+ if reprocess_args.initial_processing:
+ for id in entry_args.media_id:
+ entry = MediaEntry.query.filter_by(id=id).first()
+ # Should we get the feed_url?
+ run_process_media(entry)
+
+ elif reprocess_args.resize:
+ if reprocess_args.resize == 'medium' or reprocess_args.resize == \
+ 'thumb':
+ for id in entry_args.media_id:
+ entry = MediaEntry.query.filter_by(id=id).first()
+
+ # For now we can only reprocess with the original file
+ if not entry.media_files.get('original'):
+                    raise Exception(_('The original file for this media entry'
+                        ' does not exist.'))
+
+ reprocess_info = {'resize': reprocess_args.resize}
+
+ if reprocess_args.size and len(reprocess_args.size) == 2:
+ reprocess_info['max_width'] = reprocess_args.size[0]
+ reprocess_info['max_height'] = reprocess_args.size[1]
+
+ run_process_media(entry, reprocess_info)
+
+ else:
+                raise Exception(_('The --resize flag must be set to either'
+                    ' "thumb" or "medium".'))
+
+ else:
+ _log.warn('You must set either --resize or --initial_processing flag'
+ ' to reprocess an image.')
hooks = {
diff --git a/mediagoblin/media_types/image/processing.py b/mediagoblin/media_types/image/processing.py
index baf2ac7e..4f619f47 100644
--- a/mediagoblin/media_types/image/processing.py
+++ b/mediagoblin/media_types/image/processing.py
@@ -74,11 +74,13 @@ def resize_image(proc_state, resized, keyname, target_name, new_size,
def resize_tool(proc_state, force, keyname, target_name,
- conversions_subdir, exif_tags):
+ conversions_subdir, exif_tags, new_size=None):
# filename -- the filename of the original image being resized
filename = proc_state.get_queued_filename()
- max_width = mgg.global_config['media:' + keyname]['max_width']
- max_height = mgg.global_config['media:' + keyname]['max_height']
+ if not new_size:
+ max_width = mgg.global_config['media:' + keyname]['max_width']
+ max_height = mgg.global_config['media:' + keyname]['max_height']
+ new_size = (max_width, max_height)
# If the size of the original file exceeds the specified size for the desired
# file, a target_name file is created and later associated with the media
# entry.
@@ -93,7 +95,7 @@ def resize_tool(proc_state, force, keyname, target_name,
or exif_image_needs_rotation(exif_tags):
resize_image(
proc_state, im, unicode(keyname), target_name,
- (max_width, max_height),
+ new_size,
exif_tags, conversions_subdir)
@@ -119,7 +121,7 @@ def sniff_handler(media_file, **kw):
return None
-def process_image(proc_state):
+def process_image(proc_state, reprocess_info=None):
"""Code to process an image. Will be run by celery.
A Workbench() represents a local temporary dir. It is automatically
@@ -127,45 +129,75 @@ def process_image(proc_state):
"""
entry = proc_state.entry
workbench = proc_state.workbench
-
+ import ipdb
+ ipdb.set_trace()
# Conversions subdirectory to avoid collisions
conversions_subdir = os.path.join(
workbench.dir, 'conversions')
os.mkdir(conversions_subdir)
- queued_filename = proc_state.get_queued_filename()
- name_builder = FilenameBuilder(queued_filename)
+ if reprocess_info:
+ _reprocess_image(proc_state, reprocess_info, conversions_subdir)
+
+ else:
+ queued_filename = proc_state.get_queued_filename()
+ name_builder = FilenameBuilder(queued_filename)
- # EXIF extraction
- exif_tags = extract_exif(queued_filename)
- gps_data = get_gps_data(exif_tags)
+ # EXIF extraction
+ exif_tags = extract_exif(queued_filename)
+ gps_data = get_gps_data(exif_tags)
- # Always create a small thumbnail
- resize_tool(proc_state, True, 'thumb',
- name_builder.fill('{basename}.thumbnail{ext}'),
- conversions_subdir, exif_tags)
+ # Always create a small thumbnail
+ resize_tool(proc_state, True, 'thumb',
+ name_builder.fill('{basename}.thumbnail{ext}'),
+ conversions_subdir, exif_tags)
+
+ # Possibly create a medium
+ resize_tool(proc_state, False, 'medium',
+ name_builder.fill('{basename}.medium{ext}'),
+ conversions_subdir, exif_tags)
+
+ # Copy our queued local workbench to its final destination
+ proc_state.copy_original(name_builder.fill('{basename}{ext}'))
+
+ # Remove queued media file from storage and database
+ proc_state.delete_queue_file()
- # Possibly create a medium
- resize_tool(proc_state, False, 'medium',
- name_builder.fill('{basename}.medium{ext}'),
- conversions_subdir, exif_tags)
+ # Insert exif data into database
+ exif_all = clean_exif(exif_tags)
- # Copy our queued local workbench to its final destination
- proc_state.copy_original(name_builder.fill('{basename}{ext}'))
+ if len(exif_all):
+ entry.media_data_init(exif_all=exif_all)
- # Remove queued media file from storage and database
- proc_state.delete_queue_file()
+ if len(gps_data):
+ for key in list(gps_data.keys()):
+ gps_data['gps_' + key] = gps_data.pop(key)
+ entry.media_data_init(**gps_data)
- # Insert exif data into database
- exif_all = clean_exif(exif_tags)
- if len(exif_all):
- entry.media_data_init(exif_all=exif_all)
+def _reprocess_image(proc_state, reprocess_info, conversions_subdir):
+ reprocess_filename = proc_state.get_reprocess_filename()
+ name_builder = FilenameBuilder(reprocess_filename)
- if len(gps_data):
- for key in list(gps_data.keys()):
- gps_data['gps_' + key] = gps_data.pop(key)
- entry.media_data_init(**gps_data)
+ exif_tags = extract_exif(reprocess_filename)
+
+ if reprocess_info.get('max_width'):
+ max_width = reprocess_info['max_width']
+ else:
+ max_width = mgg.global_config \
+ ['media:' + reprocess_info['resize']]['max_width']
+
+ if reprocess_info.get('max_height'):
+ max_height = reprocess_info['max_height']
+ else:
+ max_height = mgg.global_config \
+ ['media:' + reprocess_info['resize']]['max_height']
+
+ new_size = (max_width, max_height)
+
+ resize_tool(proc_state, False, reprocess_info['resize'],
+ name_builder.fill('{basename}.thumbnail{ext}'),
+ conversions_subdir, exif_tags, new_size)
if __name__ == '__main__':