-rw-r--r-- | mediagoblin/media_types/image/processing.py | 16
-rw-r--r-- | mediagoblin/processing/__init__.py          | 31
-rw-r--r-- | mediagoblin/processing/task.py              |  9
3 files changed, 44 insertions(+), 12 deletions(-)
diff --git a/mediagoblin/media_types/image/processing.py b/mediagoblin/media_types/image/processing.py
index 99be848f..541e5109 100644
--- a/mediagoblin/media_types/image/processing.py
+++ b/mediagoblin/media_types/image/processing.py
@@ -19,7 +19,6 @@ import os
 import logging
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import BadMediaFail, \
     create_pub_filepath, FilenameBuilder
 from mediagoblin.tools.exif import exif_fix_image_orientation, \
@@ -95,21 +94,21 @@ def sniff_handler(media_file, **kw):
     return False
 
 
-@get_workbench
-def process_image(entry, workbench=None):
+def process_image(entry):
     """Code to process an image. Will be run by celery.
 
     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    proc_state = entry.proc_state
+    workbench = proc_state.workbench
+
     # Conversions subdirectory to avoid collisions
     conversions_subdir = os.path.join(
         workbench.dir, 'conversions')
     os.mkdir(conversions_subdir)
-    queued_filepath = entry.queued_media_file
-    queued_filename = workbench.localized_file(
-        mgg.queue_store, queued_filepath,
-        'source')
+
+    queued_filename = proc_state.get_queued_filename()
     name_builder = FilenameBuilder(queued_filename)
 
     # EXIF extraction
@@ -147,8 +146,7 @@ def process_image(entry, workbench=None):
     mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
 
     # Remove queued media file from storage and database
-    mgg.queue_store.delete_file(queued_filepath)
-    entry.queued_media_file = []
+    proc_state.delete_queue_file()
 
     # Insert media file information into database
     media_files_dict = entry.setdefault('media_files', {})
diff --git a/mediagoblin/processing/__init__.py b/mediagoblin/processing/__init__.py
index e2bc1a13..738378b8 100644
--- a/mediagoblin/processing/__init__.py
+++ b/mediagoblin/processing/__init__.py
@@ -74,6 +74,37 @@ class FilenameBuilder(object):
             ext=self.ext)
 
 
+class ProcessingState(object):
+    def __init__(self, entry):
+        self.entry = entry
+        self.workbench = None
+        self.queued_filename = None
+
+        # Monkey patch us onto the entry
+        entry.proc_state = self
+
+    def set_workbench(self, wb):
+        self.workbench = wb
+
+    def get_queued_filename(self):
+        """
+        Get the a filename for the original, on local storage
+        """
+        if self.queued_filename is not None:
+            return self.queued_filename
+        queued_filepath = self.entry.queued_media_file
+        queued_filename = self.workbench.localized_file(
+            mgg.queue_store, queued_filepath,
+            'source')
+        self.queued_filename = queued_filename
+        return queued_filename
+
+    def delete_queue_file(self):
+        queued_filepath = self.entry.queued_media_file
+        mgg.queue_store.delete_file(queued_filepath)
+        self.entry.queued_media_file = []
+
+
 def mark_entry_failed(entry_id, exc):
     """
     Mark a media entry as having failed in its conversion.
diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py
index e9bbe084..8614c673 100644
--- a/mediagoblin/processing/task.py
+++ b/mediagoblin/processing/task.py
@@ -22,7 +22,7 @@ from celery import registry, task
 
 from mediagoblin import mg_globals as mgg
 from mediagoblin.db.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from . import mark_entry_failed, BaseProcessingFail, ProcessingState
 from mediagoblin.tools.processing import json_processing_callback
 
 _log = logging.getLogger(__name__)
@@ -85,8 +85,11 @@ class ProcessMedia(task.Task):
 
         _log.debug('Processing {0}'.format(entry))
 
-        # run the processing code
-        entry.media_manager['processor'](entry)
+        proc_state = ProcessingState(entry)
+        with mgg.workbench_manager.create() as workbench:
+            proc_state.set_workbench(workbench)
+            # run the processing code
+            entry.media_manager['processor'](entry)
 
         # We set the state to processed and save the entry here so there's
         # no need to save at the end of the processing stage, probably ;)
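
Taken together, the three hunks replace the @get_workbench decorator with an explicit ProcessingState object: the Celery task creates the state, attaches a workbench to it, and the image processor reaches both through entry.proc_state. A minimal sketch of the resulting call flow, assuming the mgg.workbench_manager and media-entry behavior shown in the diff (run_processing is a hypothetical wrapper for illustration, not part of the commit):

    from mediagoblin import mg_globals as mgg
    from mediagoblin.processing import ProcessingState

    def run_processing(entry):
        # Hypothetical wrapper mirroring ProcessMedia.run above.
        # ProcessingState.__init__ monkey-patches itself onto the
        # entry, so process_image() can find it as entry.proc_state.
        proc_state = ProcessingState(entry)
        with mgg.workbench_manager.create() as workbench:
            proc_state.set_workbench(workbench)
            # The processor localizes the queued file lazily via
            # proc_state.get_queued_filename() (cached on first call)
            # and removes it with proc_state.delete_queue_file().
            entry.media_manager['processor'](entry)

Because get_queued_filename() is lazy and cached, a processor that never touches the original skips the copy out of the queue store entirely, which appears to be the point of moving workbench handling out of the decorator and into shared state.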