Skip to content

Commit

Permalink
Added auto indexing when new documents are uploaded. Added re-indexing when sharing is changed.
Browse files Browse the repository at this point in the history
  • Loading branch information
flekschas committed Jun 29, 2015
1 parent 36d4199 commit d703d5f
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 13 deletions.
18 changes: 9 additions & 9 deletions refinery/core/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,8 +251,6 @@ def share(self, group, readonly=True):
if not readonly:
assign_perm('change_%s' % self._meta.verbose_name, group, self)

resource_shared.send(sender=self)

def unshare(self, group):
remove_perm('read_%s' % self._meta.verbose_name, group, self)
remove_perm('change_%s' % self._meta.verbose_name, group, self)
Expand Down Expand Up @@ -315,13 +313,6 @@ class Meta:
abstract = True


def print_shared(sender, **kwargs):
logger.info("Sharable Resouce has been shared with sender: %s" % sender)

resource_shared = Signal()
resource_shared.connect(print_shared)


class TemporaryResource:
'''Mix-in class for temporary resources like NodeSet instances.
Expand Down Expand Up @@ -497,6 +488,15 @@ def get_file_size(self):

return file_size

def share(self, group, readonly=True):
super(DataSet, self).share(group, readonly)
# This might be a hack but I couldn't find an easier solution to about
# the import loop. I found this solution here
# http://stackoverflow.com/a/7199514/981933
from core.search_indexes import DataSetIndex
logger.info("Re-index / update data set: %s", self)
DataSetIndex().update_object(self, using='core')


class InvestigationLink(models.Model):
data_set = models.ForeignKey(DataSet)
Expand Down
7 changes: 3 additions & 4 deletions refinery/core/utils.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import logging
from core.models import DataSet, Project
from core.search_indexes import DataSetIndex

logger = logging.getLogger(__name__)


def update_data_set_index():
data_set_index = DataSetIndex()
data_set_index.update_object(data_set, using="core")
def index_data_set(data_set):
logger.debug('Index new data set (uuid: %s)', data_set.uuid)
DataSetIndex().update_object(data_set, using='core')
4 changes: 4 additions & 0 deletions refinery/data_set_manager/tasks.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from celery.task import task
from core.models import *
from core.utils import index_data_set
from data_set_manager.isa_tab_parser import IsaTabParser
from data_set_manager.models import Investigation, Node, \
initialize_attribute_order
Expand Down Expand Up @@ -455,6 +456,9 @@ def create_dataset(

dataset.save()

# Finally index data set
index_data_set(dataset)

return dataset.uuid

return None
Expand Down

0 comments on commit d703d5f

Please sign in to comment.