Skip to content

Commit

Permalink
Delete elasticsearch index on deletion, omit trash search results (#390)
Browse files · Browse the repository at this point in the history
* Add trash checks for Datasets, Collections

* Update CHANGELOG.md

* Remove entries from Elasticsearch

* check if files are in trash

---------

Co-authored-by: Luigi Marini <lmarini@illinois.edu>
  • Loading branch information
max-zilla and lmarini committed Sep 12, 2023
1 parent a515dac commit 4f6a6e3
Show file tree
Hide file tree
Showing 11 changed files with 52 additions and 6 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Expand Up @@ -5,6 +5,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

## Unreleased

### Fixed

- Search results are now filtered to exclude resources that have been moved to the trash before they are displayed [#377](https://github.com/clowder-framework/clowder/issues/377)

## Unreleased

Expand Down
4 changes: 4 additions & 0 deletions app/api/Collections.scala
Expand Up @@ -157,6 +157,10 @@ class Collections @Inject() (datasets: DatasetService,
case Some(collection) => {
val useTrash = play.api.Play.configuration.getBoolean("useTrash").getOrElse(false)
if (!useTrash || (useTrash && collection.trash)){
Logger.debug("Deleting collection from indexes " + collectionId)
current.plugin[ElasticsearchPlugin].foreach {
_.delete(collectionId.stringify)
}
events.addObjectEvent(request.user , collection.id, collection.name, EventType.DELETE_COLLECTION.toString)
collections.delete(collectionId)
current.plugin[AdminsNotifierPlugin].foreach {
Expand Down
5 changes: 5 additions & 0 deletions app/api/Datasets.scala
Expand Up @@ -2041,6 +2041,11 @@ class Datasets @Inject()(
def deleteDatasetHelper(id: UUID, request: UserRequest[AnyContent]) = {
datasets.get(id) match {
case Some(dataset) => {
Logger.debug("Deleting dataset from indexes " + id)
current.plugin[ElasticsearchPlugin].foreach {
_.delete(id.stringify)
}

//remove dataset from RDF triple store if triple store is used
configuration.getString("userdfSPARQLStore").getOrElse("no") match {
case "yes" => rdfsparql.removeDatasetFromGraphs(id)
Expand Down
3 changes: 3 additions & 0 deletions app/api/Files.scala
Expand Up @@ -1697,6 +1697,9 @@ class Files @Inject()(
current.plugin[VersusPlugin].foreach {
_.removeFromIndexes(id)
}
current.plugin[ElasticsearchPlugin].foreach {
_.delete(id.stringify)
}
Logger.debug("Deleting file: " + file.filename)
files.removeFile(id, Utils.baseUrl(request), request.apiKey, request.user)

Expand Down
2 changes: 2 additions & 0 deletions app/services/CollectionService.scala
Expand Up @@ -252,4 +252,6 @@ trait CollectionService {

def getMetrics(): Iterator[Collection]

/** Returns true if the collection with the given id has been marked as trash. */
def isInTrash(id: UUID): Boolean

}
2 changes: 2 additions & 0 deletions app/services/DatasetService.scala
Expand Up @@ -389,5 +389,7 @@ trait DatasetService {

def getTrashedIds(): List[UUID]

/** Returns true if the dataset with the given id has been marked as trash. */
def isInTrash(id: UUID): Boolean

def recursiveArchive(dataset: Dataset, host: String, parameters: JsObject, apiKey: Option[String], user: Option[User])
}
18 changes: 12 additions & 6 deletions app/services/ElasticsearchPlugin.scala
Expand Up @@ -228,12 +228,18 @@ class ElasticsearchPlugin(application: Application) extends Plugin {
// Check permissions for each resource
results.foreach(resource => {
resource.resourceType match {
case ResourceRef.file => if (Permission.checkPermission(user, Permission.ViewFile, resource))
filesFound += resource.id
case ResourceRef.dataset => if (Permission.checkPermission(user, Permission.ViewDataset, resource))
datasetsFound += resource.id
case ResourceRef.collection => if (Permission.checkPermission(user, Permission.ViewDataset, resource))
collectionsFound += resource.id
case ResourceRef.file => {
if (Permission.checkPermission(user, Permission.ViewFile, resource) && !files.isInTrash(resource.id))
filesFound += resource.id
}
case ResourceRef.dataset => {
if (Permission.checkPermission(user, Permission.ViewDataset, resource) && !datasets.isInTrash(resource.id))
datasetsFound += resource.id
}
case ResourceRef.collection => {
if (Permission.checkPermission(user, Permission.ViewDataset, resource) && !collections.isInTrash(resource.id))
collectionsFound += resource.id
}
case _ => {}
}
})
Expand Down
2 changes: 2 additions & 0 deletions app/services/FileService.scala
Expand Up @@ -248,4 +248,6 @@ trait FileService {

def getIterator(space: Option[String], since: Option[String], until: Option[String]): Iterator[File]

/** Returns true if the file with the given id belongs to at least one dataset that has been marked as trash. */
def isInTrash(id: UUID): Boolean

}
4 changes: 4 additions & 0 deletions app/services/mongodb/MongoDBCollectionService.scala
Expand Up @@ -1116,6 +1116,10 @@ class MongoDBCollectionService @Inject() (
Collection.find(MongoDBObject("trash" -> false)).toIterator
}

/** Returns true if the collection with the given id has been marked as trash.
  *
  * Performs a single lookup matching both the id and the trash flag, so a
  * non-trashed (or missing) collection yields false.
  */
def isInTrash(id: UUID): Boolean = {
  val trashedById = MongoDBObject("_id" -> new ObjectId(id.stringify), "trash" -> true)
  Collection.findOne(trashedById).nonEmpty
}

private def isSubCollectionIdInCollection(subCollectionId: UUID, collection: Collection) : Boolean = {
if (collection.child_collection_ids.contains(subCollectionId)){
return true
Expand Down
4 changes: 4 additions & 0 deletions app/services/mongodb/MongoDBDatasetService.scala
Expand Up @@ -1686,6 +1686,10 @@ class MongoDBDatasetService @Inject() (
trashedIds.toList
}

/** Returns true if the dataset with the given id has been marked as trash.
  *
  * Queries for a document matching both the id and the trash flag; a
  * non-trashed (or missing) dataset therefore yields false.
  */
def isInTrash(id: UUID): Boolean = {
  val trashedById = MongoDBObject("_id" -> new ObjectId(id.stringify), "trash" -> true)
  Dataset.findOne(trashedById).nonEmpty
}

/**
* Recursively submit requests to archive or unarchive the contents of the given dataset.
* NOTE: "parameters" includes "operation", which supports both archiving and unarchiving
Expand Down
9 changes: 9 additions & 0 deletions app/services/mongodb/MongoDBFileService.scala
Expand Up @@ -1237,6 +1237,15 @@ class MongoDBFileService @Inject() (
until.foreach(t => query = query ++ ("uploadDate" $lte Parsers.fromISO8601(t)))
FileDAO.find(query)
}

/** Returns true if the file with the given id belongs to at least one dataset
  * that has been marked as trash.
  *
  * A file is considered trashed transitively through its containing datasets;
  * `exists` short-circuits on the first trashed dataset instead of scanning
  * every containing dataset as the previous var-accumulator version did.
  */
def isInTrash(id: UUID): Boolean = {
  datasets.findByFileIdAllContain(id).exists(_.trash)
}
}

object FileDAO extends ModelCompanion[File, ObjectId] {
Expand Down

0 comments on commit 4f6a6e3

Please sign in to comment.