From eace050a7d41333e659f049d61b7505fc5fbf627 Mon Sep 17 00:00:00 2001 From: Elrond Date: Wed, 21 Mar 2012 11:39:52 +0100 Subject: Move celery task into own task.py Move the actual celery task from processing/__init__.py into its own .../task.py. That way it can be imported as needed. --- mediagoblin/processing/task.py | 78 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 mediagoblin/processing/task.py (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py new file mode 100644 index 00000000..901d293b --- /dev/null +++ b/mediagoblin/processing/task.py @@ -0,0 +1,78 @@ +# GNU MediaGoblin -- federated, autonomous media hosting +# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import logging + +from celery.task import Task + +from mediagoblin import mg_globals as mgg +from mediagoblin.db.util import ObjectId +from mediagoblin.media_types import get_media_manager +from mediagoblin.processing import mark_entry_failed, BaseProcessingFail + +_log = logging.getLogger(__name__) + + +################################ +# Media processing initial steps +################################ + +class ProcessMedia(Task): + """ + DEPRECATED -- This now resides in the individual media plugins + + Pass this entry off for processing. + """ + def run(self, media_id): + """ + Pass the media entry off to the appropriate processing function + (for now just process_image...) + """ + entry = mgg.database.MediaEntry.one( + {'_id': ObjectId(media_id)}) + + # Try to process, and handle expected errors. + try: + #__import__(entry.media_type) + manager = get_media_manager(entry.media_type) + _log.debug('Processing {0}'.format(entry)) + manager['processor'](entry) + except BaseProcessingFail, exc: + mark_entry_failed(entry._id, exc) + return + except ImportError, exc: + _log.error( + 'Entry {0} failed to process due to an import error: {1}'\ + .format( + entry.title, + exc)) + + mark_entry_failed(entry._id, exc) + + entry.state = u'processed' + entry.save() + + def on_failure(self, exc, task_id, args, kwargs, einfo): + """ + If the processing failed we should mark that in the database. + + Assuming that the exception raised is a subclass of + BaseProcessingFail, we can use that to get more information + about the failure and store that for conveying information to + users about the failure, etc. + """ + entry_id = args[0] + mark_entry_failed(entry_id, exc) -- cgit v1.2.3 From 51eb0267d901bafcc90879dadbc2b8616ecdc4f5 Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Tue, 10 Jul 2012 17:53:37 +0200 Subject: Minor improvements to the processing panel - It is now possible to actually see what's processing, due to a bug fix where __getitem__ was called on the db model. - Removed DEPRECATED message from the docstring, it wasn't true. 
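The __getitem__ bug mentioned above boils down to treating the model like a dict: the SQL-backed MediaEntry exposes its columns as attributes, so dict-style lookups that used to work on Mongo documents break in the panel code. A minimal illustration of the difference, using a hypothetical stand-in class rather than the real model:

    class FakeMediaEntry(object):          # stand-in for the real model
        def __init__(self, title, state):
            self.title = title
            self.state = state

    entry = FakeMediaEntry(u'A picture', u'processing')

    title = entry.title    # attribute access: works on the SQL-backed model
    # entry['title']       # raises TypeError: the model defines no __getitem__
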
--- mediagoblin/processing/task.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 901d293b..af815362 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -24,6 +24,8 @@ from mediagoblin.media_types import get_media_manager from mediagoblin.processing import mark_entry_failed, BaseProcessingFail _log = logging.getLogger(__name__) +logging.basicConfig() +_log.setLevel(logging.DEBUG) ################################ @@ -32,8 +34,6 @@ _log = logging.getLogger(__name__) class ProcessMedia(Task): """ - DEPRECATED -- This now resides in the individual media plugins - Pass this entry off for processing. """ def run(self, media_id): @@ -44,16 +44,21 @@ class ProcessMedia(Task): entry = mgg.database.MediaEntry.one( {'_id': ObjectId(media_id)}) + _log.info('Running task {0} on media {1}: {2}'.format( + self.name, + entry._id, + entry.title)) + # Try to process, and handle expected errors. try: #__import__(entry.media_type) manager = get_media_manager(entry.media_type) _log.debug('Processing {0}'.format(entry)) manager['processor'](entry) - except BaseProcessingFail, exc: + except BaseProcessingFail as exc: mark_entry_failed(entry._id, exc) return - except ImportError, exc: + except ImportError as exc: _log.error( 'Entry {0} failed to process due to an import error: {1}'\ .format( -- cgit v1.2.3 From 6471291575c97f03d129051dc3d2bef28b4d89f2 Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Wed, 11 Jul 2012 00:36:42 +0200 Subject: Panel improvements - Added progress meter for video and audio media types. - Changed the __repr__ method of a MediaEntry to display a bit more useful explanation. - Added a new MediaEntry.state, 'processing', which means that the task is running the processor on the item currently. - Fixed some PEP8 issues in user_pages/views.py - Fixed the ATOM TAG URI to show the correct year. --- mediagoblin/processing/task.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index af815362..58e36a11 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -44,20 +44,24 @@ class ProcessMedia(Task): entry = mgg.database.MediaEntry.one( {'_id': ObjectId(media_id)}) - _log.info('Running task {0} on media {1}: {2}'.format( - self.name, - entry._id, - entry.title)) - # Try to process, and handle expected errors. try: - #__import__(entry.media_type) manager = get_media_manager(entry.media_type) + + entry.state = u'processing' + entry.save() + _log.debug('Processing {0}'.format(entry)) + manager['processor'](entry) + + entry.state = u'processed' + entry.save() + except BaseProcessingFail as exc: mark_entry_failed(entry._id, exc) return + except ImportError as exc: _log.error( 'Entry {0} failed to process due to an import error: {1}'\ @@ -67,9 +71,6 @@ class ProcessMedia(Task): mark_entry_failed(entry._id, exc) - entry.state = u'processed' - entry.save() - def on_failure(self, exc, task_id, args, kwargs, einfo): """ If the processing failed we should mark that in the database. 
-- cgit v1.2.3 From 2891b2c6d0526f0faab78f182d3d084717e7691e Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Wed, 1 Aug 2012 01:16:00 +0200 Subject: All processing exceptions are now logged All processing exceptions should now be logged, the MediaEntry marked as failed, the exception re-raised. --- mediagoblin/processing/task.py | 8 ++++++++ 1 file changed, 8 insertions(+) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 58e36a11..e46d2dfd 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -71,6 +71,14 @@ class ProcessMedia(Task): mark_entry_failed(entry._id, exc) + except Exception as exc: + _log.error('An unhandled exception was raised while' + + ' processing {0}'.format( + entry)) + + mark_entry_failed(entry._id, exc) + raise + def on_failure(self, exc, task_id, args, kwargs, einfo): """ If the processing failed we should mark that in the database. -- cgit v1.2.3 From 5354f954dc94aafd35bc037faad2412f73320d8c Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Mon, 24 Sep 2012 23:47:32 +0200 Subject: Added support for http callbacks on processing Sends an HTTP POST request back to an URL given on submission to the API submit view. --- mediagoblin/processing/task.py | 8 ++++++++ 1 file changed, 8 insertions(+) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index e46d2dfd..7f4b8429 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -22,6 +22,7 @@ from mediagoblin import mg_globals as mgg from mediagoblin.db.util import ObjectId from mediagoblin.media_types import get_media_manager from mediagoblin.processing import mark_entry_failed, BaseProcessingFail +from mediagoblin.tools.processing import json_processing_callback _log = logging.getLogger(__name__) logging.basicConfig() @@ -58,8 +59,10 @@ class ProcessMedia(Task): entry.state = u'processed' entry.save() + json_processing_callback(entry) except BaseProcessingFail as exc: mark_entry_failed(entry._id, exc) + json_processing_callback(entry) return except ImportError as exc: @@ -70,6 +73,7 @@ class ProcessMedia(Task): exc)) mark_entry_failed(entry._id, exc) + json_processing_callback(entry) except Exception as exc: _log.error('An unhandled exception was raised while' @@ -77,6 +81,7 @@ class ProcessMedia(Task): entry)) mark_entry_failed(entry._id, exc) + json_processing_callback(entry) raise def on_failure(self, exc, task_id, args, kwargs, einfo): @@ -90,3 +95,6 @@ class ProcessMedia(Task): """ entry_id = args[0] mark_entry_failed(entry_id, exc) + + entry = mgg.database.MediaEntry.query.filter_by(id=entry_id) + json_processing_callback(entry) -- cgit v1.2.3 From 939d57a022daa8f1f08dcfd111385225ca6030f6 Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Tue, 25 Sep 2012 01:06:25 +0200 Subject: HTTP callback fixes - Added HTTPError catching around the callback request, to not mark the entry as failed, just log the exception. - Fixed bug where I forgot to actually fetch the entry before passing it to json_processing_callback. - Changed __main__ migration #6 to create the ProcessingMetaData table as it is currently, to prevent possible breakage if a siteadmin is lagging behind with his db migrations and more than one migration wants to fix stuff with the ProcessingMetaData table. 
--- mediagoblin/processing/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 7f4b8429..187b893d 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -96,5 +96,5 @@ class ProcessMedia(Task): entry_id = args[0] mark_entry_failed(entry_id, exc) - entry = mgg.database.MediaEntry.query.filter_by(id=entry_id) + entry = mgg.database.MediaEntry.query.filter_by(id=entry_id).first() json_processing_callback(entry) -- cgit v1.2.3 From 5f8b4ae895ecb228c5f5d615818ffe0a06a30473 Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Tue, 4 Dec 2012 09:57:56 +0100 Subject: make media_manager a property of MediaEntry in mixin.py In all cases where get_media_manager(_media_type_as_string) was called in our code base we ultimately passed in a "MediaEntry().media_type" to get the matching MEDIA_MANAGER. It so makes sense to make this a function of the MediaEntry rather than a global function in mediagoblin.media_types and passing around media_entry.media_type as arguments all the time. It saves a few import statements and arguments. I also made it so the Media_manager property is cached for subsequent calls, although I am not too sure that this is needed (there are other cases for which this would make more sense) Also add a get_media_manager test to the media submission tests. It submits an image and checks that both media.media_type and media.media_manager return the right thing. Not sure if these tests could not be merged with an existing submission test, but it can't hurt to have things explicit. TODO: Right now we iterate through all existing media_managers to find the right one based on the string of its module name. This should be made a simple dict lookup to avoid all the extra work. Signed-off-by: Sebastian Spaeth --- mediagoblin/processing/task.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 187b893d..09abebb5 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -20,7 +20,6 @@ from celery.task import Task from mediagoblin import mg_globals as mgg from mediagoblin.db.util import ObjectId -from mediagoblin.media_types import get_media_manager from mediagoblin.processing import mark_entry_failed, BaseProcessingFail from mediagoblin.tools.processing import json_processing_callback @@ -47,14 +46,12 @@ class ProcessMedia(Task): # Try to process, and handle expected errors. try: - manager = get_media_manager(entry.media_type) - entry.state = u'processing' entry.save() _log.debug('Processing {0}'.format(entry)) - manager['processor'](entry) + entry.media_manager['processor'](entry) entry.state = u'processed' entry.save() -- cgit v1.2.3 From 6af6bc05bbd3c54ca242cbda0e74c62b023fe629 Mon Sep 17 00:00:00 2001 From: Christopher Allan Webber Date: Wed, 12 Dec 2012 10:13:56 -0600 Subject: We don't need to save entries during processing... 
also adding comments explaining such --- mediagoblin/processing/task.py | 3 +++ 1 file changed, 3 insertions(+) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 09abebb5..a8bc0f2f 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -51,8 +51,11 @@ class ProcessMedia(Task): _log.debug('Processing {0}'.format(entry)) + # run the processing code entry.media_manager['processor'](entry) + # We set the state to processed and save the entry here so there's + # no need to save at the end of the processing stage, probably ;) entry.state = u'processed' entry.save() -- cgit v1.2.3 From 5c2b84869fe3f4bfe41a31ff3968bb13c6d7f868 Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Fri, 30 Nov 2012 10:49:06 +0100 Subject: Move DBModel._id -> DBModel.id We were refering to model._id in most of the code base as this is what Mongo uses. However, each use of _id required a) fixup of queries: e.g. what we did in our find() and find_one() functions moving all '_id' to 'id'. It also required using AliasFields to make the ._id attribute available. This all means lots of superfluous fixing and transitioning in a SQL world. It will also not work in the long run. Much newer code already refers to the objects by model.id (e.g. in the oauth plugin), which will break with Mongo. So let's be honest, rip out the _id mongoism and live with .id as the one canonical way to address objects. This commit modifies all users and providers of model._id to use model.id instead. This patch works with or without Mongo removed first, but will break Mongo usage (even more than before) I have not bothered to fixup db.mongo.* and db.sql.convert (which converts from Mongo to SQL) Signed-off-by: Sebastian Spaeth --- mediagoblin/processing/task.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index a8bc0f2f..06a26bb7 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -42,7 +42,7 @@ class ProcessMedia(Task): (for now just process_image...) """ entry = mgg.database.MediaEntry.one( - {'_id': ObjectId(media_id)}) + {'id': ObjectId(media_id)}) # Try to process, and handle expected errors. try: @@ -61,7 +61,7 @@ class ProcessMedia(Task): json_processing_callback(entry) except BaseProcessingFail as exc: - mark_entry_failed(entry._id, exc) + mark_entry_failed(entry.id, exc) json_processing_callback(entry) return @@ -72,7 +72,7 @@ class ProcessMedia(Task): entry.title, exc)) - mark_entry_failed(entry._id, exc) + mark_entry_failed(entry.id, exc) json_processing_callback(entry) except Exception as exc: @@ -80,7 +80,7 @@ class ProcessMedia(Task): + ' processing {0}'.format( entry)) - mark_entry_failed(entry._id, exc) + mark_entry_failed(entry.id, exc) json_processing_callback(entry) raise -- cgit v1.2.3 From 71717fd5316607500159f782b10ca91cf9684bfd Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Tue, 25 Dec 2012 20:52:25 +0100 Subject: Remove ObjectId from the tree This was one of the last remaining Mongo holdouts and has been removed from the tree herewith. Good bye, ObjectId. 
Signed-off-by: Sebastian Spaeth --- mediagoblin/processing/task.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 06a26bb7..b7e761f2 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -19,7 +19,7 @@ import logging from celery.task import Task from mediagoblin import mg_globals as mgg -from mediagoblin.db.util import ObjectId +from mediagoblin.db.sql.models import MediaEntry from mediagoblin.processing import mark_entry_failed, BaseProcessingFail from mediagoblin.tools.processing import json_processing_callback @@ -41,8 +41,7 @@ class ProcessMedia(Task): Pass the media entry off to the appropriate processing function (for now just process_image...) """ - entry = mgg.database.MediaEntry.one( - {'id': ObjectId(media_id)}) + entry = MediaEntry.query.get(media_id) # Try to process, and handle expected errors. try: -- cgit v1.2.3 From b0c8328e547288028e7e43f0ceb1fa9f7c8dac4a Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Fri, 30 Nov 2012 10:10:35 +0100 Subject: Move db.sql.models* to db.models* --- mediagoblin/processing/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index b7e761f2..b29de9bd 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -19,7 +19,7 @@ import logging from celery.task import Task from mediagoblin import mg_globals as mgg -from mediagoblin.db.sql.models import MediaEntry +from mediagoblin.db.models import MediaEntry from mediagoblin.processing import mark_entry_failed, BaseProcessingFail from mediagoblin.tools.processing import json_processing_callback -- cgit v1.2.3 From 2cfffd5ed8c054bb60c27ede4e69667f97d12b09 Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Tue, 15 Jan 2013 14:41:30 +0100 Subject: Make PuSHing the Pubhubsubbub server an async task (#436, #585) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Notifying the PuSH servers had 3 problems.  1) it was done immediately after sending of the processing task to celery. So if celery was run in a separate process we would notify the PuSH servers before the new media was processed/ visible. (#436) 2) Notification code was called in submit/views.py, so submitting via the API never resulted in notifications. (#585) 3) If Notifying the PuSH server failed, we would never retry. The solution was to make the PuSH notification an asynchronous subtask. This way: 1) it will only be called once async processing has finished, 2) it is in the main processing code path, so even API calls will result in notifications, and 3) We retry 3 times in case of failure before giving up. If the server is in a separate process, we will wait 3x 2 minutes before retrying the notification. The only downside is that the celery server needs to have access to the internet to ping the PuSH server. If that is a problem, we need to make the task belong to a special group of celery servers that has access to the internet. As a side effect, I believe I removed the limitation that prevented us from upgrading celery. 
Signed-off-by: Sebastian Spaeth --- mediagoblin/processing/task.py | 48 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index b29de9bd..2cdd5f1a 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -15,8 +15,10 @@ # along with this program. If not, see . import logging +import urllib +import urllib2 -from celery.task import Task +from celery import registry, task from mediagoblin import mg_globals as mgg from mediagoblin.db.models import MediaEntry @@ -28,18 +30,51 @@ logging.basicConfig() _log.setLevel(logging.DEBUG) +@task.task(default_retry_delay=2 * 60) +def handle_push_urls(feed_url): + """Subtask, notifying the PuSH servers of new content + + Retry 3 times every 2 minutes if run in separate process before failing.""" + if not mgg.app_config["push_urls"]: + return # Nothing to do + _log.debug('Notifying Push servers for feed {0}'.format(feed_url)) + hubparameters = { + 'hub.mode': 'publish', + 'hub.url': feed_url} + hubdata = urllib.urlencode(hubparameters) + hubheaders = { + "Content-type": "application/x-www-form-urlencoded", + "Connection": "close"} + for huburl in mgg.app_config["push_urls"]: + hubrequest = urllib2.Request(huburl, hubdata, hubheaders) + try: + hubresponse = urllib2.urlopen(hubrequest) + except (urllib2.HTTPError, urllib2.URLError) as exc: + # We retry by default 3 times before failing + _log.info("PuSH url %r gave error %r", huburl, exc) + try: + return handle_push_urls.retry(exc=exc, throw=False) + except Exception as e: + # All retries failed, Failure is no tragedy here, probably. + _log.warn('Failed to notify PuSH server for feed {0}. ' + 'Giving up.'.format(feed_url)) + return False + ################################ # Media processing initial steps ################################ -class ProcessMedia(Task): +class ProcessMedia(task.Task): """ Pass this entry off for processing. """ - def run(self, media_id): + def run(self, media_id, feed_url): """ Pass the media entry off to the appropriate processing function (for now just process_image...) + + :param feed_url: The feed URL that the PuSH server needs to be + updated for. """ entry = MediaEntry.query.get(media_id) @@ -58,6 +93,9 @@ class ProcessMedia(Task): entry.state = u'processed' entry.save() + # Notify the PuSH servers as async task + handle_push_urls.subtask().delay(feed_url) + json_processing_callback(entry) except BaseProcessingFail as exc: mark_entry_failed(entry.id, exc) @@ -97,3 +135,7 @@ class ProcessMedia(Task): entry = mgg.database.MediaEntry.query.filter_by(id=entry_id).first() json_processing_callback(entry) + +# Register the task +process_media = registry.tasks[ProcessMedia.name] + -- cgit v1.2.3 From c7b3d070b65a84e3bfa9d8e3e6f52aac6552910f Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Tue, 15 Jan 2013 15:03:00 +0100 Subject: Don't pass request into run_process_media People(tm) want to start run_process_media from the CLI and might not have a request object handy. So pass in the feed_url into run_process_media rather than the request object and allow the feed url to be empty (resulting in no PuSH notification at all then). 
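A sketch of the calling side this commit describes, as it might look in mediagoblin/processing/__init__.py; the signature and the default value are assumptions drawn from the message above, not the actual code:

    from mediagoblin.processing.task import process_media


    def run_process_media(entry, feed_url=None):
        """Queue processing for one entry; with feed_url=None the worker
        simply skips the PuSH notification."""
        process_media.apply_async([entry.id, feed_url])

A command-line reprocessing script can then call run_process_media(entry) without ever constructing a request object.
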
Signed-off-by: Sebastian Spaeth --- mediagoblin/processing/task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 2cdd5f1a..e9bbe084 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -94,7 +94,8 @@ class ProcessMedia(task.Task): entry.save() # Notify the PuSH servers as async task - handle_push_urls.subtask().delay(feed_url) + if mgg.app_config["push_urls"] and feed_url: + handle_push_urls.subtask().delay(feed_url) json_processing_callback(entry) except BaseProcessingFail as exc: -- cgit v1.2.3 From 93b14fc300618e8b2c4ebfd54b9c59369ce0f417 Mon Sep 17 00:00:00 2001 From: Elrond Date: Wed, 23 Jan 2013 19:44:28 +0100 Subject: Implement ProcessingState class and use for images The idea is to have a class that has the knowledge of the currently being processed media and also has tools for that. The long term idea is to make reprocessing easier by for example hiding the way the original comes into the processing code. --- mediagoblin/processing/task.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index e9bbe084..8614c673 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -22,7 +22,7 @@ from celery import registry, task from mediagoblin import mg_globals as mgg from mediagoblin.db.models import MediaEntry -from mediagoblin.processing import mark_entry_failed, BaseProcessingFail +from . import mark_entry_failed, BaseProcessingFail, ProcessingState from mediagoblin.tools.processing import json_processing_callback _log = logging.getLogger(__name__) @@ -85,8 +85,11 @@ class ProcessMedia(task.Task): _log.debug('Processing {0}'.format(entry)) - # run the processing code - entry.media_manager['processor'](entry) + proc_state = ProcessingState(entry) + with mgg.workbench_manager.create() as workbench: + proc_state.set_workbench(workbench) + # run the processing code + entry.media_manager['processor'](entry) # We set the state to processed and save the entry here so there's # no need to save at the end of the processing stage, probably ;) -- cgit v1.2.3 From fb46fa663dbd80a66a3a5995dfda730dd3fd52a4 Mon Sep 17 00:00:00 2001 From: Elrond Date: Sat, 26 Jan 2013 15:28:24 +0100 Subject: Kill monkeypatching of ProcessingState. And change the process_foo() API to accept a processingstate now. image and video are tested, the others are UNTESTED. --- mediagoblin/processing/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index 8614c673..aec50aab 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -89,7 +89,7 @@ class ProcessMedia(task.Task): with mgg.workbench_manager.create() as workbench: proc_state.set_workbench(workbench) # run the processing code - entry.media_manager['processor'](entry) + entry.media_manager['processor'](proc_state) # We set the state to processed and save the entry here so there's # no need to save at the end of the processing stage, probably ;) -- cgit v1.2.3 From e8676fa383975b37ac26e0908ec35da44afbc30a Mon Sep 17 00:00:00 2001 From: Elrond Date: Fri, 8 Mar 2013 20:51:32 +0100 Subject: MediaManager: Use .foo instead of ['foo']. To make .media_fetch_order work, create a property. 
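A rough sketch of the attribute-style manager this commit moves to, shown here only to make the diff below easier to read; the real class lives under mediagoblin/media_types/ and the names here are assumptions for illustration:

    class MediaManagerBase(object):
        def __init__(self, processor, default_fetch_order=None):
            self.processor = processor
            self._default_fetch_order = default_fetch_order or []

        @property
        def media_fetch_order(self):
            # Computed on access rather than stored as a plain dict key,
            # which is why dict-style ['media_fetch_order'] lookups had to go.
            return list(self._default_fetch_order)

With the manager exposing plain attributes and properties, the task can call entry.media_manager.processor(proc_state) directly, which is exactly what the hunk below changes.
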
--- mediagoblin/processing/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'mediagoblin/processing/task.py') diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py index aec50aab..9af192ed 100644 --- a/mediagoblin/processing/task.py +++ b/mediagoblin/processing/task.py @@ -89,7 +89,7 @@ class ProcessMedia(task.Task): with mgg.workbench_manager.create() as workbench: proc_state.set_workbench(workbench) # run the processing code - entry.media_manager['processor'](proc_state) + entry.media_manager.processor(proc_state) # We set the state to processed and save the entry here so there's # no need to save at the end of the processing stage, probably ;) -- cgit v1.2.3
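
For reference, the core of ProcessMedia.run() that this series converges on looks roughly as follows, condensed from the hunks above with the logging calls and the except blocks left out:

    def run(self, media_id, feed_url):
        entry = MediaEntry.query.get(media_id)

        entry.state = u'processing'
        entry.save()

        proc_state = ProcessingState(entry)
        with mgg.workbench_manager.create() as workbench:
            proc_state.set_workbench(workbench)
            # run the processing code
            entry.media_manager.processor(proc_state)

        entry.state = u'processed'
        entry.save()

        # Ping any configured PuSH hubs asynchronously, then the submitter.
        if mgg.app_config["push_urls"] and feed_url:
            handle_push_urls.subtask().delay(feed_url)

        json_processing_callback(entry)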