diff --git a/localtv/admin/forms.py b/localtv/admin/forms.py index 72778941..72370ddd 100644 --- a/localtv/admin/forms.py +++ b/localtv/admin/forms.py @@ -21,7 +21,7 @@ from localtv import models, utils from localtv.settings import API_KEYS -from localtv.tasks import video_save_thumbnail, feed_update, CELERY_USING +from localtv.tasks import video_save_thumbnail, feed_update from localtv.user_profile import forms as user_profile_forms from vidscraper import auto_feed @@ -66,8 +66,7 @@ def save(self, commit=True): if (thumbnail_url and not models.Video.objects.get(id=self.instance.id).thumbnail_url == thumbnail_url): self.instance.thumbnail_url = thumbnail_url - video_save_thumbnail.delay(self.instance.pk, - using=CELERY_USING) + video_save_thumbnail.delay(self.instance.pk) return forms.ModelForm.save(self, commit=commit) @@ -1002,7 +1001,6 @@ def save(self, commit=True): self.instance.name = self.instance.feed_url instance = super(AddFeedForm, self).save(commit) feed_update.delay(instance.pk, - using=CELERY_USING, clear_rejected=True) return instance diff --git a/localtv/management/commands/update_popularity.py b/localtv/management/commands/update_popularity.py index 631157f0..c888a7e8 100644 --- a/localtv/management/commands/update_popularity.py +++ b/localtv/management/commands/update_popularity.py @@ -18,10 +18,9 @@ def handle(self, **options): return since = options['since'] - from localtv.tasks import haystack_batch_update, CELERY_USING + from localtv.tasks import haystack_batch_update haystack_batch_update.delay(Video._meta.app_label, Video._meta.module_name, start=datetime.now() - timedelta(since), - date_lookup='watch__timestamp', - using=CELERY_USING) + date_lookup='watch__timestamp') diff --git a/localtv/management/commands/update_sources.py b/localtv/management/commands/update_sources.py index aec87cd3..aa3c9983 100644 --- a/localtv/management/commands/update_sources.py +++ b/localtv/management/commands/update_sources.py @@ -7,6 +7,6 @@ def handle_noargs(self, **options): if site_too_old(): return - from localtv.tasks import update_sources, CELERY_USING - - update_sources.delay(using=CELERY_USING) + from localtv.tasks import update_sources + + update_sources.delay() diff --git a/localtv/models.py b/localtv/models.py index 24ced64b..26630311 100644 --- a/localtv/models.py +++ b/localtv/models.py @@ -27,7 +27,7 @@ from django.utils.html import escape as html_escape from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ -from haystack import connections +from haystack import connections, connection_router from mptt.models import MPTTModel from notification import models as notification from slugify import slugify @@ -175,8 +175,7 @@ class WidgetSettingsManager(SiteRelatedManager): def _new_entry(self, site, using): ws = super(WidgetSettingsManager, self)._new_entry(site, using) try: - site_settings = SiteSettings._default_manager.db_manager( - using).get(site=site) + site_settings = SiteSettings.objects.get_cached(site, using) except SiteSettings.DoesNotExist: pass else: @@ -278,8 +277,7 @@ class Source(Thumbnailable): class Meta: abstract = True - def update(self, video_iter, source_import, using='default', - clear_rejected=False): + def update(self, video_iter, source_import, clear_rejected=False): """ Imports videos from a feed/search. `videos` is an iterable which returns :class:`vidscraper.videos.Video` objects. 
We use @@ -312,28 +310,25 @@ def update(self, video_iter, source_import, using='default', status=Video.PENDING, author_pks=author_pks, category_pks=category_pks, - clear_rejected=clear_rejected, - using=using) + clear_rejected=clear_rejected) except Exception: source_import.handle_error( 'Import task creation failed for %r' % ( vidscraper_video.url,), is_skip=True, - with_exception=True, - using=using) + with_exception=True) except Exception: - source_import.fail(with_exception=True, using=using) + source_import.fail(with_exception=True) return - source_import.__class__._default_manager.using(using).filter( + source_import.__class__._default_manager.filter( pk=source_import.pk ).update( total_videos=total_videos ) mark_import_pending.delay(import_app_label=import_opts.app_label, import_model=import_opts.module_name, - import_pk=source_import.pk, - using=using) + import_pk=source_import.pk) class Feed(Source): @@ -392,21 +387,21 @@ def __unicode__(self): def get_absolute_url(self): return ('localtv_list_feed', [self.pk]) - def update(self, using='default', **kwargs): + def update(self, **kwargs): """ Fetch and import new videos from this feed. """ try: - FeedImport.objects.using(using).get(source=self, - status=FeedImport.STARTED) + FeedImport.objects.get(source=self, + status=FeedImport.STARTED) except FeedImport.DoesNotExist: pass else: logging.info('Skipping import of %s: already in progress' % self) return - feed_import = FeedImport.objects.db_manager(using).create(source=self, + feed_import = FeedImport.objects.create(source=self, auto_approve=self.auto_approve) video_iter = vidscraper.auto_feed( @@ -419,7 +414,7 @@ def update(self, using='default', **kwargs): video_iter.load() except Exception: feed_import.fail("Data loading failed for {source}", - with_exception=True, using=using) + with_exception=True) return self.etag = getattr(video_iter, 'etag', None) or '' @@ -437,7 +432,7 @@ def update(self, using='default', **kwargs): self.save() super(Feed, self).update(video_iter, source_import=feed_import, - using=using, **kwargs) + **kwargs) def source_type(self): return self.calculated_source_type @@ -460,7 +455,7 @@ def pre_save_set_calculated_source_type(instance, **kwargs): instance.calculated_source_type = instance._calculate_source_type() # Plus, if the name changed, we have to recalculate all the Videos that depend on us. try: - v = Feed.objects.using(instance._state.db).get(id=instance.id) + v = Feed.objects.get(id=instance.id) except Feed.DoesNotExist: return instance if v.name != instance.name: @@ -581,21 +576,21 @@ class SavedSearch(Source): def __unicode__(self): return self.query_string - def update(self, using='default', **kwargs): + def update(self, **kwargs): """ Fetch and import new videos from this search. 
""" try: - SearchImport.objects.using(using).get(source=self, - status=SearchImport.STARTED) + SearchImport.objects.get(source=self, + status=SearchImport.STARTED) except SearchImport.DoesNotExist: pass else: logging.info('Skipping import of %s: already in progress' % self) return - search_import = SearchImport.objects.db_manager(using).create( + search_import = SearchImport.objects.create( source=self, auto_approve=self.auto_approve ) @@ -613,18 +608,18 @@ def update(self, using='default', **kwargs): except Exception: search_import.handle_error(u'Skipping import of search results ' u'from %s' % video_iter.__class__.__name__, - with_exception=True, using=using) + with_exception=True) continue video_iters.append(video_iter) if video_iters: super(SavedSearch, self).update(itertools.chain(*video_iters), source_import=search_import, - using=using, **kwargs) + **kwargs) else: # Mark the import as failed if none of the searches could load. search_import.fail("All searches failed for {source}", - with_exception=False, using=using) + with_exception=False) def source_type(self): return u'Search' @@ -708,11 +703,10 @@ def set_video_source(self, video): """ raise NotImplementedError - def get_videos(self, using='default'): + def get_videos(self): raise NotImplementedError - def handle_error(self, message, is_skip=False, with_exception=False, - using='default'): + def handle_error(self, message, is_skip=False, with_exception=False): """ Logs the error with the default logger and to the database. @@ -732,12 +726,12 @@ def handle_error(self, message, is_skip=False, with_exception=False, else: logging.warn(message) tb = '' - self.errors.db_manager(using).create(message=message, - source_import=self, - traceback=tb, - is_skip=is_skip) + self.errors.create(message=message, + source_import=self, + traceback=tb, + is_skip=is_skip) if is_skip: - self.__class__._default_manager.using(using).filter(pk=self.pk + self.__class__._default_manager.filter(pk=self.pk ).update(videos_skipped=models.F('videos_skipped') + 1) def get_index_creation_kwargs(self, video, vidscraper_video): @@ -747,7 +741,7 @@ def get_index_creation_kwargs(self, video, vidscraper_video): 'index': vidscraper_video.index } - def handle_video(self, video, vidscraper_video, using='default'): + def handle_video(self, video, vidscraper_video): """ Creates an index instance connecting the video to this import. @@ -756,13 +750,12 @@ def handle_video(self, video, vidscraper_video, using='default'): :param using: The database alias to use. Default: 'default' """ - self.indexes.db_manager(using).create( + self.indexes.create( **self.get_index_creation_kwargs(video, vidscraper_video)) - self.__class__._default_manager.using(using).filter(pk=self.pk + self.__class__._default_manager.filter(pk=self.pk ).update(videos_imported=models.F('videos_imported') + 1) - def fail(self, message="Import failed for {source}", with_exception=False, - using='default'): + def fail(self, message="Import failed for {source}", with_exception=False): """ Mark an import as failed, along with some post-fail cleanup. 
@@ -771,8 +764,8 @@ def fail(self, message="Import failed for {source}", with_exception=False, self.last_activity = datetime.datetime.now() self.save() self.handle_error(message.format(source=self.source), - with_exception=with_exception, using=using) - self.get_videos(using).delete() + with_exception=with_exception) + self.get_videos().delete() class FeedImport(SourceImport): @@ -781,9 +774,8 @@ class FeedImport(SourceImport): def set_video_source(self, video): video.feed_id = self.source_id - def get_videos(self, using='default'): - return Video.objects.using(using).filter( - feedimportindex__source_import=self) + def get_videos(self): + return Video.objects.filter(feedimportindex__source_import=self) class SearchImport(SourceImport): @@ -792,9 +784,8 @@ class SearchImport(SourceImport): def set_video_source(self, video): video.search_id = self.source_id - def get_videos(self, using='default'): - return Video.objects.using(using).filter( - searchimportindex__source_import=self) + def get_videos(self): + return Video.objects.filter(searchimportindex__source_import=self) class Video(Thumbnailable): @@ -915,7 +906,7 @@ def _check_for_duplicates(self, exclude_rejected=True): if not self.embed_code and not self.file_url: raise ValidationError("Video has no embed code or file url.") - qs = Video.objects.using(self._state.db).filter(site=self.site_id) + qs = Video.objects.filter(site=self.site_id) if exclude_rejected: qs = qs.exclude(status=Video.REJECTED) @@ -952,7 +943,7 @@ def clear_rejected_duplicates(self): q_filter |= models.Q(file_url=self.file_url) if self.guid: q_filter |= models.Q(guid=self.guid) - qs = Video.objects.using(self._state.db).filter( + qs = Video.objects.filter( site=self.site_id, status=Video.REJECTED).filter(q_filter) qs.delete() @@ -978,8 +969,8 @@ def save(self, **kwargs): @classmethod def from_vidscraper_video(cls, video, status=None, commit=True, - using='default', source_import=None, site_pk=None, - authors=None, categories=None, update_index=True): + source_import=None, site_pk=None, authors=None, + categories=None, update_index=True): """ Builds a :class:`Video` instance from a :class:`vidscraper.videos.Video` instance. 
If `commit` is False, @@ -1043,15 +1034,15 @@ def save_m2m(): first, last = name.split(' ', 1) else: first, last = name, '' - author, created = User.objects.db_manager(using).get_or_create( + author, created = User.objects.get_or_create( username=name[:30], defaults={'first_name': first[:30], 'last_name': last[:30]}) if created: author.set_unusable_password() author.save() - utils.get_profile_model()._default_manager.db_manager(using - ).create(user=author, website=video.user_url or '') + utils.get_profile_model()._default_manager.create( + user=author, website=video.user_url or '') instance.authors.add(author) if categories: instance.categories = categories @@ -1063,24 +1054,22 @@ def save_m2m(): tags = set(fix(tag) for tag in video.tags if tag.strip()) for tag_name in tags: tag, created = \ - tagging.models.Tag._default_manager.db_manager( - using).get_or_create(name=tag_name) - tagging.models.TaggedItem._default_manager.db_manager( - using).create( + tagging.models.Tag._default_manager.get_or_create(name=tag_name) + tagging.models.TaggedItem._default_manager.create( tag=tag, object=instance) if source_import is not None: - source_import.handle_video(instance, video, using) + source_import.handle_video(instance, video) post_video_from_vidscraper.send(sender=cls, instance=instance, - vidscraper_video=video, using=using) + vidscraper_video=video) if update_index: + using = connection_router.for_write() index = connections[using].get_unified_index().get_index(cls) index._enqueue_update(instance) if commit: - instance.save(using=using, update_index=False) + instance.save(update_index=False) save_m2m() else: - instance._state.db = using instance.save_m2m = save_m2m return instance @@ -1211,7 +1200,7 @@ def all_categories(self): l = Category._tree_manager._translate_lookups(**l) q_list.append(models.Q(**l)) q = reduce(operator.or_, q_list) - return Category.objects.using(self._state.db).filter(q) + return Category.objects.filter(q) def pre_save_video_set_calculated_source_type(instance, **kwargs): @@ -1364,7 +1353,7 @@ def tag_unicode(self): def send_new_video_email(sender, **kwargs): site_settings = SiteSettings.objects.get_cached(site=sender.site_id, - using=sender._state.db) + using=sender._state.db) if sender.status == Video.ACTIVE: # don't send the e-mail for videos that are already active return @@ -1374,8 +1363,8 @@ def send_new_video_email(sender, **kwargs): subject = '[%s] New Video in Review Queue: %s' % (sender.site.name, sender) utils.send_notice('admin_new_submission', - subject, message, - site_settings=site_settings) + subject, message, + site_settings=site_settings) submit_finished.connect(send_new_video_email, weak=False) @@ -1430,7 +1419,7 @@ def create_email_notices(app, created_models, verbosity, **kwargs): def delete_comments(sender, instance, **kwargs): from django.contrib.comments import get_model - get_model().objects.using(instance._state.db).filter( + get_model().objects.filter( object_pk=instance.pk, content_type__app_label='localtv', content_type__model='video' diff --git a/localtv/search_indexes.py b/localtv/search_indexes.py index ce393c0e..a616c8e1 100644 --- a/localtv/search_indexes.py +++ b/localtv/search_indexes.py @@ -1,22 +1,16 @@ from datetime import datetime, timedelta -from django.conf import settings from django.contrib.auth.models import User -from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.db.models import signals -from django.template import loader from haystack import indexes from 
haystack.query import SearchQuerySet
-from tagging.models import Tag
 
 from localtv.models import Video, Feed, SavedSearch
 from localtv.playlists.models import PlaylistItem
 from localtv.tasks import haystack_update, haystack_remove
 
 
-CELERY_USING = getattr(settings, 'LOCALTV_CELERY_USING', 'default')
-
 #: We use a placeholder value because support for filtering on null values is
 #: lacking. We use January 1st, 1900 because Whoosh doesn't support datetime
 #: values earlier than that, but we want to keep the videos with no value
@@ -28,7 +22,7 @@ class QueuedSearchIndex(indexes.SearchIndex):
     def _setup_save(self):
         signals.post_save.connect(self._enqueue_update,
-                                  sender=self.get_model())
+                                  sender=self.get_model())
 
     def _setup_delete(self):
         signals.post_delete.connect(self._enqueue_removal,
@@ -36,7 +30,7 @@ def _setup_delete(self):
     def _teardown_save(self):
         signals.post_save.disconnect(self._enqueue_update,
-                                     sender=self.get_model())
+                                     sender=self.get_model())
 
     def _teardown_delete(self):
-        signals.post_delete.connect(self._enqueue_removal,
+        signals.post_delete.disconnect(self._enqueue_removal,
@@ -49,19 +43,9 @@ def _enqueue_removal(self, instance, **kwargs):
         self._enqueue_instance(instance, haystack_remove)
 
     def _enqueue_instance(self, instance, task):
-        self._enqueue(instance._meta.app_label,
-                      instance._meta.module_name,
-                      [instance.pk], task,
-                      using=instance._state.db)
-
-    def _enqueue(self, app_label, model_name, pks, task, using='default'):
-        if using == 'default':
-            # This gets called from both Celery and from the MC application.
-            # If we're in the web app, `using` is generally 'default', so we
-            # need to use CELERY_USING as our database. If they're the same,
-            # or we're not using separate databases, this is a no-op.
-            using = CELERY_USING
-        task.delay(app_label, model_name, pks, using=using)
+        task.delay(instance._meta.app_label,
+                   instance._meta.module_name,
+                   [instance.pk])
 
 
 class VideoIndex(QueuedSearchIndex, indexes.Indexable):
@@ -145,29 +129,9 @@ def _enqueue_fk_delete(self, instance, **kwargs):
         sqs = SearchQuerySet().models(self.get_model()).filter(
             **{field_name: instance.pk})
         pks = [r.pk for r in sqs]
-        self._enqueue(Video._meta.app_label,
-                      Video._meta.module_name,
-                      pks, haystack_remove,
-                      using=instance._state.db)
-
-    def prepare(self, obj):
-        """
-        Disable uploadtemplate loader - it always uses the default database.
-        This is a trailing necessity of the CELERY_USING hack.
-
-        """
-        if 'uploadtemplate.loader.Loader' in settings.TEMPLATE_LOADERS:
-            old_template_loaders = settings.TEMPLATE_LOADERS
-            loader.template_source_loaders = None
-            settings.TEMPLATE_LOADERS = tuple(loader
-                            for loader in settings.TEMPLATE_LOADERS
-                            if loader != 'uploadtemplate.loader.Loader')
-            super(VideoIndex, self).prepare(obj)
-            loader.template_source_loaders = None
-            settings.TEMPLATE_LOADERS = old_template_loaders
-        else:
-            super(VideoIndex, self).prepare(obj)
-        return self.prepared_data
+        haystack_remove.delay(Video._meta.app_label,
+                              Video._meta.module_name,
+                              pks)
 
     def get_model(self):
         return Video
@@ -198,13 +162,7 @@ def _prepare_rel_field(self, video, field):
         return [int(rel.pk) for rel in getattr(video, field).all()]
 
     def prepare_tags(self, video):
-        # We manually run this process to be sure that the tags are fetched
-        # from the correct database (not just "default").
- using = video._state.db - ct = ContentType.objects.db_manager(using).get_for_model(video) - tags = Tag.objects.using(using).filter(items__content_type__pk=ct.pk, - items__object_id=video.pk) - return [int(tag.pk) for tag in tags] + return [int(tag.pk) for tag in video.tags] def prepare_categories(self, video): return [int(rel.pk) for rel in video.all_categories] diff --git a/localtv/submit_video/forms.py b/localtv/submit_video/forms.py index 9843af87..335c018e 100644 --- a/localtv/submit_video/forms.py +++ b/localtv/submit_video/forms.py @@ -11,7 +11,7 @@ from localtv.models import Video, SiteSettings from localtv.settings import API_KEYS -from localtv.tasks import video_save_thumbnail, CELERY_USING +from localtv.tasks import video_save_thumbnail from localtv.templatetags.filters import sanitize @@ -149,7 +149,7 @@ def save_m2m(): instance.save_m2m() if instance.thumbnail_url and not instance.thumbnail: - video_save_thumbnail.delay(instance.pk, using=CELERY_USING) + video_save_thumbnail.delay(instance.pk) if self.cleaned_data.get('tags'): instance.tags = self.cleaned_data['tags'] diff --git a/localtv/tasks.py b/localtv/tasks.py index 69563e4c..26a23cf4 100644 --- a/localtv/tasks.py +++ b/localtv/tasks.py @@ -7,7 +7,6 @@ from celery.exceptions import MaxRetriesExceededError from celery.task import task from daguerre.utils import make_hash, KEEP_FORMATS, DEFAULT_FORMAT -from django.conf import settings from django.core.exceptions import ValidationError from django.core.files.base import File from django.core.files.temp import NamedTemporaryFile @@ -15,7 +14,7 @@ from django.db.models import Q from django.db.models.loading import get_model from django.contrib.auth.models import User -from haystack import connections +from haystack import connection_router, connections from haystack.query import SearchQuerySet from vidscraper.videos import Video as VidscraperVideo try: @@ -27,11 +26,6 @@ class DummyException(Exception): """ Dummy exception; nothing raises me. 
""" - -try: - from xapian import DatabaseError -except ImportError: - DatabaseError = DummyException try: from whoosh.store import LockError except ImportError: @@ -43,49 +37,45 @@ class DummyException(Exception): from localtv.utils import quote_unicode_url -CELERY_USING = getattr(settings, 'LOCALTV_CELERY_USING', 'default') - - @task(ignore_result=True) -def update_sources(using='default'): - feeds = Feed.objects.using(using).filter(status=Feed.ACTIVE, - auto_update=True) +def update_sources(): + feeds = Feed.objects.filter(status=Feed.ACTIVE, + auto_update=True) for feed_pk in feeds.values_list('pk', flat=True): - feed_update.delay(feed_pk, using=using) + feed_update.delay(feed_pk) - searches = SavedSearch.objects.using(using).filter(auto_update=True) + searches = SavedSearch.objects.filter(auto_update=True) for search_pk in searches.values_list('pk', flat=True): - search_update.delay(search_pk, using=using) + search_update.delay(search_pk) @task(ignore_result=True) -def feed_update(feed_id, using='default', clear_rejected=False): +def feed_update(feed_id, clear_rejected=False): try: - feed = Feed.objects.using(using).filter(auto_update=True + feed = Feed.objects.filter(auto_update=True ).get(pk=feed_id) except Feed.DoesNotExist: - logging.warn('feed_update(%s, using=%r) could not find feed', - feed_id, using) + logging.warn('feed_update(%s) could not find feed', + feed_id) return - feed.update(using=using, clear_rejected=clear_rejected) + feed.update(clear_rejected=clear_rejected) @task(ignore_result=True) -def search_update(search_id, using='default'): +def search_update(search_id): try: - search = SavedSearch.objects.using(using).filter(auto_update=True - ).get(pk=search_id) + search = SavedSearch.objects.filter(auto_update=True + ).get(pk=search_id) except SavedSearch.DoesNotExist: - logging.warn('search_update(%s, using=%r) could not find search', - search_id, using) + logging.warn('search_update(%s) could not find search', + search_id) return - search.update(using=using, clear_rejected=True) + search.update(clear_rejected=True) @task(ignore_result=True, max_retries=None, default_retry_delay=30) -def mark_import_pending(import_app_label, import_model, import_pk, - using='default'): +def mark_import_pending(import_app_label, import_model, import_pk): """ Checks whether an import's first stage is complete. If it's not, retries the task with a countdown of 30. @@ -93,7 +83,7 @@ def mark_import_pending(import_app_label, import_model, import_pk, """ import_class = get_model(import_app_label, import_model) try: - source_import = import_class._default_manager.using(using).get( + source_import = import_class._default_manager.get( pk=import_pk, status=import_class.STARTED) except import_class.DoesNotExist: @@ -125,7 +115,7 @@ def mark_import_pending(import_app_label, import_model, import_pk, # Otherwise the first stage is complete. Check whether they can take all # the videos. 
if source_import.auto_approve: - active_set = source_import.get_videos(using).filter( + active_set = source_import.get_videos().filter( status=Video.PENDING) for receiver, response in pre_mark_as_active.send_robust( @@ -139,27 +129,24 @@ def mark_import_pending(import_app_label, import_model, import_pk, active_set.update(status=Video.ACTIVE) - source_import.get_videos(using).filter(status=Video.PENDING).update( + source_import.get_videos().filter(status=Video.PENDING).update( status=Video.UNAPPROVED) source_import.status = import_class.PENDING source_import.save() - active_pks = source_import.get_videos(using).filter( + active_pks = source_import.get_videos().filter( status=Video.ACTIVE).values_list('pk', flat=True) if active_pks: opts = Video._meta haystack_batch_update.delay(opts.app_label, opts.module_name, - pks=list(active_pks), remove=False, - using=using) + pks=list(active_pks), remove=False) - mark_import_complete.delay(import_app_label, import_model, import_pk, - using=using) + mark_import_complete.delay(import_app_label, import_model, import_pk) @task(ignore_result=True, max_retries=None, default_retry_delay=30) -def mark_import_complete(import_app_label, import_model, import_pk, - using='default'): +def mark_import_complete(import_app_label, import_model, import_pk): """ Checks whether an import's second stage is complete. If it's not, retries the task with a countdown of 30. @@ -167,7 +154,7 @@ def mark_import_complete(import_app_label, import_model, import_pk, """ import_class = get_model(import_app_label, import_model) try: - source_import = import_class._default_manager.using(using).get( + source_import = import_class._default_manager.get( pk=import_pk, status=import_class.PENDING) except import_class.DoesNotExist: @@ -181,28 +168,23 @@ def mark_import_complete(import_app_label, import_model, import_pk, if not USE_HAYSTACK: # No need to do any comparisons - just mark it complete. video_count = haystack_count = 0 - logging.debug(('mark_import_complete(%s, %s, %i, using=%s). Skipping ' + logging.debug(('mark_import_complete(%s, %s, %i). Skipping ' 'check because haystack is disabled.'), import_app_label, - import_model, import_pk, using) + import_model, import_pk) else: - video_pks = list(source_import.get_videos(using).filter( + video_pks = list(source_import.get_videos().filter( status=Video.ACTIVE).values_list('pk', flat=True)) video_count = len(video_pks) if not video_pks: # Don't bother with the haystack query. haystack_count = 0 else: - if 'xapian' in connections[using].options['ENGINE']: - # The pk_hack field shadows the model's pk/django_id because - # xapian-haystack's django_id filtering is broken. - haystack_filter = {'pk_hack__in': video_pks} - else: - haystack_filter = {'django_id__in': video_pks} - haystack_count = SearchQuerySet().using(using).models(Video).filter( + haystack_filter = {'django_id__in': video_pks} + haystack_count = SearchQuerySet().models(Video).filter( **haystack_filter).count() - logging.debug(('mark_import_complete(%s, %s, %i, using=%s). video_count: ' + logging.debug(('mark_import_complete(%s, %s, %i). 
video_count: ' '%i, haystack_count: %i'), import_app_label, import_model, - import_pk, using, video_count, haystack_count) + import_pk, video_count, haystack_count) if haystack_count >= video_count: source_import.status = import_class.COMPLETE @@ -221,12 +203,11 @@ def mark_import_complete(import_app_label, import_model, import_pk, def video_from_vidscraper_video(video_dict, site_pk, import_app_label=None, import_model=None, import_pk=None, status=None, author_pks=None, - category_pks=None, clear_rejected=False, - using='default'): + category_pks=None, clear_rejected=False): vidscraper_video = VidscraperVideo.deserialize(video_dict, API_KEYS) import_class = get_model(import_app_label, import_model) try: - source_import = import_class.objects.using(using).get( + source_import = import_class.objects.get( pk=import_pk, status=import_class.STARTED) except import_class.DoesNotExist: @@ -241,22 +222,20 @@ def video_from_vidscraper_video(video_dict, site_pk, source_import.handle_error( ('Skipped %r: Could not load video data.' % vidscraper_video.url), - using=using, is_skip=True, - with_exception=True) + is_skip=True, with_exception=True) return if category_pks: - categories = Category.objects.using(using).filter(pk__in=category_pks) + categories = Category.objects.filter(pk__in=category_pks) else: categories = None if author_pks: - authors = User.objects.using(using).filter(pk__in=author_pks) + authors = User.objects.filter(pk__in=author_pks) else: authors = None video = Video.from_vidscraper_video(vidscraper_video, status=status, - using=using, source_import=source_import, authors=authors, categories=categories, @@ -274,7 +253,7 @@ def video_from_vidscraper_video(video_dict, site_pk, except ValidationError, e: source_import.handle_error(("Skipping %r: %r" % ( vidscraper_video.url, e.message)), - is_skip=True, using=using) + is_skip=True) return else: video.save(update_index=False) @@ -288,22 +267,21 @@ def video_from_vidscraper_video(video_dict, site_pk, logging.debug('Made video %i: %r', video.pk, video.name) if video.thumbnail_url: - video_save_thumbnail.delay(video.pk, using=using) + video_save_thumbnail.delay(video.pk) except Exception: source_import.handle_error(('Unknown error during import of %r' % vidscraper_video.url), - is_skip=True, using=using, - with_exception=True) + is_skip=True, with_exception=True) raise # so it shows up in the Celery log @task(ignore_result=True) -def video_save_thumbnail(video_pk, using='default'): +def video_save_thumbnail(video_pk): try: - video = Video.objects.using(using).get(pk=video_pk) + video = Video.objects.get(pk=video_pk) except Video.DoesNotExist: logging.warn( - 'video_save_thumbnail(%s, using=%r) could not find video', - video_pk, using) + 'video_save_thumbnail(%s) could not find video', + video_pk) return if not video.thumbnail_url: @@ -315,7 +293,7 @@ def video_save_thumbnail(video_pk, using='default'): remote_file = urllib.urlopen(thumbnail_url) except httplib.InvalidURL: # If the URL isn't valid, erase it. - Video.objects.using(using).filter(pk=video.pk + Video.objects.filter(pk=video.pk ).update(thumbnail_url='') return @@ -340,7 +318,7 @@ def video_save_thumbnail(video_pk, using='default'): im.verify() except Exception: # If the file isn't valid, erase the url. - Video.objects.using(using).filter(pk=video.pk + Video.objects.filter(pk=video.pk ).update(thumbnail_url='') return @@ -354,7 +332,7 @@ def video_save_thumbnail(video_pk, using='default'): # to avoid overwriting other changes that might have happened # simultaneously. 
final_path = default_storage.save(storage_path, File(temp)) - Video.objects.using(using).filter(pk=video.pk + Video.objects.filter(pk=video.pk ).update(thumbnail=final_path) remote_file.close() temp.close() @@ -368,7 +346,7 @@ def _haystack_database_retry(task, callback): """ try: callback() - except (DatabaseError, LockError), e: + except LockError, e: # These errors might be resolved if we just wait a bit. The wait time is # slightly random, with the intention of preventing LockError retries # from reoccurring. Maximum wait is ~30s. @@ -381,7 +359,7 @@ def _haystack_database_retry(task, callback): @task(ignore_result=True, max_retries=None) -def haystack_update(app_label, model_name, pks, remove=True, using='default'): +def haystack_update(app_label, model_name, pks, remove=True): """ Updates the haystack records for any valid instances with the given pks. Generally, ``remove`` should be ``True`` so that items which are no longer @@ -391,10 +369,11 @@ def haystack_update(app_label, model_name, pks, remove=True, using='default'): """ model_class = get_model(app_label, model_name) + using = connection_router.for_write() backend = connections[using].get_backend() index = connections[using].get_unified_index().get_index(model_class) - qs = index.index_queryset().using(using).filter(pk__in=pks) + qs = index.index_queryset().filter(pk__in=pks) if qs: _haystack_database_retry(haystack_update, @@ -402,15 +381,16 @@ def haystack_update(app_label, model_name, pks, remove=True, using='default'): if remove: unseen_pks = set(pks) - set((instance.pk for instance in qs)) - haystack_remove.apply(args=(app_label, model_name, unseen_pks, using)) + haystack_remove.apply(args=(app_label, model_name, unseen_pks)) @task(ignore_result=True, max_retries=None) -def haystack_remove(app_label, model_name, pks, using='default'): +def haystack_remove(app_label, model_name, pks): """ Removes the haystack records for any instances with the given pks. """ + using = connection_router.for_write() backend = connections[using].get_backend() def callback(): @@ -423,16 +403,17 @@ def callback(): @task(ignore_result=True) def haystack_batch_update(app_label, model_name, pks=None, start=None, end=None, date_lookup=None, batch_size=100, - remove=True, using='default'): + remove=True): """ Batches haystack index updates for the given model. If no pks are given, a general reindex will be launched. 
""" model_class = get_model(app_label, model_name) + using = connection_router.for_write() index = connections[using].get_unified_index().get_index(model_class) - pk_qs = index.index_queryset().using(using) + pk_qs = index.index_queryset() if pks is not None: pk_qs = pk_qs.filter(pk__in=pks) @@ -450,4 +431,4 @@ def haystack_batch_update(app_label, model_name, pks=None, start=None, for start in xrange(0, total, batch_size): end = min(start + batch_size, total) haystack_update.delay(app_label, model_name, pks[start:end], - remove=remove, using=using) + remove=remove) diff --git a/localtv/templates/search/indexes/localtv/video_text.txt b/localtv/templates/search/indexes/localtv/video_text.txt index 4b7ae616..cc1ab668 100644 --- a/localtv/templates/search/indexes/localtv/video_text.txt +++ b/localtv/templates/search/indexes/localtv/video_text.txt @@ -1,6 +1,6 @@ {% load filters %}{{ object.name }} {{ object.description|striptags }} -{{ object|same_db_tags }} +{{ object.tags|join:" " }} {% for cat in object.all_categories %}{{ cat.name }} {% endfor %} {% for author in object.authors.all %}{{ author.get_full_name }} {{ author.username }} diff --git a/localtv/templatetags/filters.py b/localtv/templatetags/filters.py index 10921679..361e6a9c 100644 --- a/localtv/templatetags/filters.py +++ b/localtv/templatetags/filters.py @@ -2,14 +2,12 @@ import re from bs4 import BeautifulSoup, Comment -from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.template import Library from django.utils.encoding import force_unicode from django.utils.html import urlize from django.utils.safestring import mark_safe import lxml.html -from tagging.models import Tag register = Library() @@ -118,24 +116,6 @@ def wmode_transparent(value): return mark_safe(wrapped_in_a_div) -@register.filter -def same_db_tags(video): - """ - Given a video, renders a string containing that video's tags, guaranteed - to be from the same database as the original. This is part of the - CELERY_USING hack and will be eliminated without warning. - - """ - from localtv.models import Video - if not isinstance(video, Video): - return u'' - using = video._state.db - ct = ContentType.objects.db_manager(using).get_for_model(video) - tags = Tag.objects.using(using).filter(items__content_type__pk=ct.pk, - items__object_id=video.pk) - return u'\n'.join([unicode(t) for t in tags]) - - @register.filter def full_url(url): """ diff --git a/localtv/tests/legacy/test_admin.py b/localtv/tests/legacy/test_admin.py index 852306f3..46f6b4b2 100644 --- a/localtv/tests/legacy/test_admin.py +++ b/localtv/tests/legacy/test_admin.py @@ -1065,7 +1065,6 @@ def test_POST_succeed(self): 'auto_approve': 'yes'}) feed = Feed.objects.get() feed_update.delay.assert_called_once_with(feed.pk, - using='default', clear_rejected=True) self.assertStatusCodeEquals(response, 302) self.assertEqual(response['Location'], diff --git a/localtv/tests/selenium/pages/__init__.py b/localtv/tests/selenium/pages/__init__.py index 7b14cdae..81941444 100644 --- a/localtv/tests/selenium/pages/__init__.py +++ b/localtv/tests/selenium/pages/__init__.py @@ -147,7 +147,9 @@ def click_by_css(self, element, wait_for_element=None): wait_for_element, wait for a passed in element to display """ elem = self._safe_find(element) + print "clicking element...", elem.click() + print "Done." 
diff --git a/localtv/tests/unit/test_imports.py b/localtv/tests/unit/test_imports.py
index 278e3589..65b816b0 100644
--- a/localtv/tests/unit/test_imports.py
+++ b/localtv/tests/unit/test_imports.py
@@ -67,7 +67,7 @@ def test_update_approved_feed(self):
             self.create_vidscraper_video(),
             self.create_vidscraper_video()
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         self.assertEqual(Feed.objects.get(pk=feed.pk).status, Feed.ACTIVE)
 
     def test_auto_approve_True(self):
@@ -82,7 +82,7 @@
             self.create_vidscraper_video(),
             self.create_vidscraper_video()
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         self.assertEqual(Video.objects.count(), 2)
         self.assertEqual(Video.objects.filter(
                          status=Video.ACTIVE).count(), 2)
@@ -98,7 +98,7 @@
             self.create_vidscraper_video(),
             self.create_vidscraper_video()
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         self.assertEqual(Video.objects.count(), 2)
         self.assertEqual(Video.objects.filter(
                          status=Video.UNAPPROVED).count(), 2)
@@ -114,7 +114,7 @@
             self.create_vidscraper_video(guid='2'),
             self.create_vidscraper_video(guid='1')
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         db_guids = Video.objects.in_feed_order().values_list('guid',
                                                              flat=True)
         self.assertEqual(list(db_guids), ['1', '2'])
@@ -130,7 +130,7 @@
             self.create_vidscraper_video(guid='duplicate'),
             self.create_vidscraper_video(guid='duplicate')
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         feed_import = FeedImport.objects.get(pk=feed_import.pk) # reload
         self.assertEqual(feed_import.videos_skipped, 1)
         self.assertEqual(feed_import.videos_imported, 1)
@@ -147,7 +147,7 @@
             self.create_vidscraper_video(link='http://duplicate.com/'),
             self.create_vidscraper_video(link='http://duplicate.com/')
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         feed_import = FeedImport.objects.get(pk=feed_import.pk) # reload
         self.assertEqual(feed_import.videos_skipped, 1)
         self.assertEqual(feed_import.videos_imported, 1)
@@ -186,7 +186,7 @@
                 tags=['tag1', 'tag2']
             ),
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         vv = video_iter[0]
         video = Video.objects.get()
         self.assertEqual(video.feed, feed)
@@ -211,7 +211,7 @@
             self.create_vidscraper_video()
         ]
         video_iter[0].url = video_iter[0].link = None
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         self.assertEqual(Video.objects.count(), 1)
 
     def test_entries_enclosure_type_optional(self):
@@ -225,7 +225,7 @@
             self.create_vidscraper_video(
                 files=[VidscraperVideoFile('http://example.com/media.ogg')])
         ]
-        Source.update(feed, video_iter, feed_import, using='default')
+        Source.update(feed, video_iter, feed_import)
         self.assertEqual(Video.objects.count(), 1)
 
     def test_entries_multiple_imports(self):
@@ -239,14 +239,14 @@ def
test_entries_multiple_imports(self): self.create_vidscraper_video(guid='2'), self.create_vidscraper_video(guid='1') ] - Source.update(feed, video_iter, feed_import, using='default') + Source.update(feed, video_iter, feed_import) feed_import = FeedImport.objects.get(pk=feed_import.pk) # reload self.assertEqual(feed_import.videos_skipped, 0) self.assertEqual(feed_import.videos_imported, 2) self.assertEqual(Video.objects.count(), 2) feed_import2 = FeedImport.objects.create(source=feed) - Source.update(feed, video_iter, feed_import2, using='default') + Source.update(feed, video_iter, feed_import2) feed_import2 = FeedImport.objects.get(pk=feed_import2.pk) # reload self.assertEqual(feed_import2.videos_skipped, 2) self.assertEqual(feed_import2.videos_imported, 0) @@ -271,7 +271,7 @@ def test_entries_from_mc(self): ), ] - Source.update(feed, video_iter, feed_import, using='default') + Source.update(feed, video_iter, feed_import) vv = video_iter[0] video = Video.objects.get() self.assertEqual(video.website_url, vv.link) @@ -291,7 +291,7 @@ def test_entries_atom_with_long_item(self): link='http://example.com/' + 'link' * 200, files=[VidscraperVideoFile('http://example.com/' + 'f.ogg' * 200)]) ] - Source.update(feed, video_iter, feed_import, using='default') + Source.update(feed, video_iter, feed_import) v = Video.objects.get() self.assertEqual(v.website_url, video_iter[0].link) self.assertEqual(v.file_url, video_iter[0].files[0].url) @@ -318,7 +318,7 @@ def count_removal(sender, **kwargs): self.create_vidscraper_video(), self.create_vidscraper_video(), ] - Source.update(feed, video_iter, feed_import, using='default') + Source.update(feed, video_iter, feed_import) self.assertEqual(self.updates, 1) self.assertEqual(self.removals, 0) self.assertEqual(SearchQuerySet().count(), len(video_iter)) diff --git a/localtv/tests/unit/test_models.py b/localtv/tests/unit/test_models.py index 3e534467..2bc98cfd 100644 --- a/localtv/tests/unit/test_models.py +++ b/localtv/tests/unit/test_models.py @@ -123,8 +123,8 @@ def test_icon(self): widget_settings = WidgetSettings.objects.get_current() widget_settings.icon.open() - site_settings.logo.open() widget_icon = widget_settings.icon.read() + site_settings.logo.open() site_settings_logo = site_settings.logo.read() self.assertEqual(len(widget_icon), len(site_settings_logo)) diff --git a/localtv/tests/unit/test_tasks.py b/localtv/tests/unit/test_tasks.py index b0c9b09c..221e5308 100644 --- a/localtv/tests/unit/test_tasks.py +++ b/localtv/tests/unit/test_tasks.py @@ -254,7 +254,7 @@ def test_distinct_pks(self): 'date_lookup': 'watch__timestamp'}) delay.assert_called_once_with(Video._meta.app_label, Video._meta.module_name, - [video1.pk], using='default', remove=True) + [video1.pk], remove=True) class VideoSaveThumbnailTestCase(BaseTestCase): diff --git a/localtv/utils.py b/localtv/utils.py index 4642eb0c..f477ded0 100644 --- a/localtv/utils.py +++ b/localtv/utils.py @@ -22,13 +22,13 @@ from localtv.settings import API_KEYS -def get_tag(tag_text, using='default'): +def get_tag(tag_text): while True: try: - tags = tagging.models.Tag.objects.using(using).filter( + tags = tagging.models.Tag.objects.filter( name=tag_text) if not tags.count(): - return tagging.models.Tag.objects.using(using).create( + return tagging.models.Tag.objects.create( name=tag_text) elif tags.count() == 1: return tags[0] @@ -59,14 +59,14 @@ def edit_string_for_tags(tag_list): return edit_string -def get_or_create_tags(tag_list, using='default'): +def get_or_create_tags(tag_list): tag_set = set() for 
tag_text in tag_list:
         if isinstance(tag_text, basestring):
-            tag_text = tag_text[:50] # tags can only by 50 chars
+            tag_text = tag_text[:50] # tags can only be 50 chars
             if settings.FORCE_LOWERCASE_TAGS:
                 tag_text = tag_text.lower()
-        tag = get_tag(tag_text, using)
+        tag = get_tag(tag_text)
         tag_set.add(tag)
     return edit_string_for_tags(list(tag_set))
diff --git a/test_project/test_project/settings.py b/test_project/test_project/settings.py
index f0d78383..1d014437 100644
--- a/test_project/test_project/settings.py
+++ b/test_project/test_project/settings.py
@@ -63,6 +63,7 @@
 # Comment these lines out to use a celery server.
 CELERY_ALWAYS_EAGER = True
 CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
+# BROKER_URL = 'django://'
 
 EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@@ -189,6 +190,8 @@
     'compressor',
     'mptt',
     'django_nose',
+    # Uncomment the following line to use Celery with the Django broker.
+    # 'kombu.transport.django',
 )
 
 if os.environ.get('MIGRATIONS'):
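For context on the pattern this diff adopts throughout localtv/tasks.py and localtv/models.py: rather than threading a `using` alias through every task signature and queryset (the old CELERY_USING hack), the write alias is resolved where the index is touched, via Haystack's connection router. Below is a minimal sketch of that pattern, not part of the patch; `reindex_videos` is an illustrative name, and the sketch assumes a Haystack 2.x release where `connection_router.for_write()` returns a single alias string (newer releases return a list, handled defensively here):

    from haystack import connection_router, connections

    def reindex_videos(model_class, pks):
        # Resolve the search-backend alias at the point of use instead of
        # accepting a `using` argument from the caller.
        using = connection_router.for_write()
        if isinstance(using, (list, tuple)):
            # Newer Haystack releases return a list of aliases.
            using = using[0]
        backend = connections[using].get_backend()
        index = connections[using].get_unified_index().get_index(model_class)
        qs = index.index_queryset().filter(pk__in=pks)
        if qs:
            backend.update(index, qs)

This mirrors the body of the reworked haystack_update task: callers and Celery payloads stay alias-free, and the routing decision lives with the search configuration rather than with each call site.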