Diffstat (limited to 'mediagoblin/processing')
-rw-r--r--  mediagoblin/processing/__init__.py |  6
-rw-r--r--  mediagoblin/processing/task.py     | 19
2 files changed, 12 insertions, 13 deletions
diff --git a/mediagoblin/processing/__init__.py b/mediagoblin/processing/__init__.py
index 6b2d50e2..e2bc1a13 100644
--- a/mediagoblin/processing/__init__.py
+++ b/mediagoblin/processing/__init__.py
@@ -38,7 +38,7 @@ class ProgressCallback(object):
 def create_pub_filepath(entry, filename):
     return mgg.public_store.get_unique_filepath(
             ['media_entries',
-             unicode(entry._id),
+             unicode(entry.id),
              filename])
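
For context, a minimal sketch (not part of the diff) of what the new path segment looks like, assuming a hypothetical entry whose SQL primary key is 42: entry._id used to be a Mongo ObjectId, while entry.id is now a plain integer.

# Hypothetical stand-in for a MediaEntry with an integer primary key;
# the real model comes from mediagoblin.db.models.
class FakeEntry(object):
    id = 42

# unicode(entry.id) now yields u'42' instead of a 24-character ObjectId
# hex string, so the unique public path ends up looking roughly like:
#     ['media_entries', u'42', u'original.jpg']
print ['media_entries', unicode(FakeEntry.id), u'original.jpg']
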
@@ -93,7 +93,7 @@ def mark_entry_failed(entry_id, exc):
         # Looks like yes, so record information about that failure and any
         # metadata the user might have supplied.
         atomic_update(mgg.database.MediaEntry,
-            {'_id': entry_id},
+            {'id': entry_id},
             {u'state': u'failed',
              u'fail_error': unicode(exc.exception_path),
              u'fail_metadata': exc.metadata})
@@ -104,7 +104,7 @@ def mark_entry_failed(entry_id, exc):
         # metadata (in fact overwrite it if somehow it had previous info
         # here)
         atomic_update(mgg.database.MediaEntry,
-            {'_id': entry_id},
+            {'id': entry_id},
             {u'state': u'failed',
              u'fail_error': None,
              u'fail_metadata': {}})
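
A hedged sketch of how mark_entry_failed is typically called after this change; the failure class and processing step below are hypothetical, but the point is that callers now pass the integer entry.id, which matches the {'id': entry_id} filter used by atomic_update() above.

from mediagoblin.processing import BaseProcessingFail, mark_entry_failed

class HypotheticalResizeFail(BaseProcessingFail):
    # Hypothetical failure subclass; real ones live in the media type
    # plugins and carry user-facing messages.
    pass

def hypothetical_process(entry):
    # Stand-in for a real processing routine hitting an expected error.
    raise HypotheticalResizeFail()

def hypothetical_caller(entry):
    try:
        hypothetical_process(entry)
    except BaseProcessingFail as exc:
        # entry.id is the SQL integer primary key; the old code passed
        # entry._id, the Mongo ObjectId.
        mark_entry_failed(entry.id, exc)
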
diff --git a/mediagoblin/processing/task.py b/mediagoblin/processing/task.py
index 187b893d..b29de9bd 100644
--- a/mediagoblin/processing/task.py
+++ b/mediagoblin/processing/task.py
@@ -19,8 +19,7 @@ import logging
 from celery.task import Task
 from mediagoblin import mg_globals as mgg
-from mediagoblin.db.util import ObjectId
-from mediagoblin.media_types import get_media_manager
+from mediagoblin.db.models import MediaEntry
 from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
 from mediagoblin.tools.processing import json_processing_callback
@@ -42,26 +41,26 @@ class ProcessMedia(Task):
         Pass the media entry off to the appropriate processing function
         (for now just process_image...)
         """
-        entry = mgg.database.MediaEntry.one(
-            {'_id': ObjectId(media_id)})
+        entry = MediaEntry.query.get(media_id)
         # Try to process, and handle expected errors.
         try:
-            manager = get_media_manager(entry.media_type)
-
             entry.state = u'processing'
             entry.save()
             _log.debug('Processing {0}'.format(entry))
-            manager['processor'](entry)
+            # run the processing code
+            entry.media_manager['processor'](entry)
+            # We set the state to processed and save the entry here so there's
+            # no need to save at the end of the processing stage, probably ;)
             entry.state = u'processed'
             entry.save()
             json_processing_callback(entry)
         except BaseProcessingFail as exc:
-            mark_entry_failed(entry._id, exc)
+            mark_entry_failed(entry.id, exc)
             json_processing_callback(entry)
             return
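
For orientation, a rough sketch of the dict-style media manager that entry.media_manager['processor'](entry) dispatches into; the keys and the processor body here are assumptions modelled on the media type plugins, not part of this diff.

def process_image(entry):
    # A real processor would build thumbnails/conversions, store them via
    # mgg.public_store, and attach the paths to the entry; elided here.
    pass

# Hypothetical plugin-level manager dict; ProcessMedia.run() only relies
# on the 'processor' key.
MEDIA_MANAGER = {
    'human_readable': 'Image',
    'processor': process_image,
}
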
@@ -72,7 +71,7 @@ class ProcessMedia(Task):
                     entry.title,
                     exc))
-            mark_entry_failed(entry._id, exc)
+            mark_entry_failed(entry.id, exc)
             json_processing_callback(entry)
         except Exception as exc:
@@ -80,7 +79,7 @@ class ProcessMedia(Task):
                     + ' processing {0}'.format(
                         entry))
-            mark_entry_failed(entry._id, exc)
+            mark_entry_failed(entry.id, exc)
             json_processing_callback(entry)
             raise
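
Finally, a hedged sketch of how this task is typically queued from the submission side; the helper name and the queued_task_id attribute are assumptions (the real call lives in the submit code), but it shows why ProcessMedia.run() now reloads the row with MediaEntry.query.get(media_id) from an id serialized as a unicode string.

from celery import registry
from mediagoblin.processing.task import ProcessMedia

def queue_processing(entry):
    # Hypothetical submission-side helper.  The integer primary key goes
    # over the wire as a unicode string; ProcessMedia.run() then loads
    # the entry again inside the worker.
    process_media = registry.tasks[ProcessMedia.name]
    process_media.apply_async(
        [unicode(entry.id)], {},
        task_id=entry.queued_task_id)  # queued_task_id: assumed attribute
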