From 75abf37de6ab2d3c9dee69e49082b9d32f83d6e5 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 12:28:33 +0200 Subject: [PATCH 01/17] Add enhanced bulk indexing for Elasticsearch - Introduced `LocalDateTimeJsonSerializer` and `LocalDateTimeJsonDeserializer` for improved LocalDateTime handling. - Enhanced bulk indexing in ElasticIndexService: - Added new methods for reindexing cases and tasks into Elasticsearch. - Introduced batch processing for efficient indexing with log monitoring. - Removed deprecated `bulkIndex(List, Class, String...)` method. - Updated ElasticTask and IndexCaseEvent to replace `stringId` with `id` field, simplifying object mapping. - Improved task and case processing for better compatibility with MongoDB and Elasticsearch. - Updated task entity construction in response body for consistency. --- .../ElasticsearchConfiguration.java | 61 ++++- .../DataConfigurationProperties.java | 57 +++- .../service/ElasticCaseMappingService.java | 4 +- .../elastic/service/ElasticCaseService.java | 14 +- .../elastic/service/ElasticIndexService.java | 256 ++++++++++++++++-- .../service/ElasticTaskQueueManager.java | 6 +- .../elastic/service/ElasticTaskService.java | 2 +- .../elastic/service/ReindexingTask.java | 30 +- .../interfaces/IElasticIndexService.java | 3 +- .../engine/elastic/web/ElasticController.java | 32 ++- .../web/requestbodies/IndexParams.java | 27 ++ .../workflow/web/responsebodies/Task.java | 2 +- .../engine-processes/menu/menu_item.xml | 2 +- .../elastic/DataSearchRequestTest.groovy | 2 +- .../engine/elastic/ReindexTest.groovy | 2 +- .../filters/FilterImportExportTest.groovy | 2 +- .../objects/elastic/domain/CaseField.java | 4 + .../objects/elastic/domain/ElasticCase.java | 18 +- .../objects/elastic/domain/ElasticTask.java | 4 +- .../LocalDateTimeJsonDeserializer.java | 31 +++ .../LocalDateTimeJsonSerializer.java | 18 ++ .../event/events/task/IndexTaskEvent.java | 2 +- .../event/events/workflow/IndexCaseEvent.java | 2 +- 
.../spring/elastic/domain/CaseField.java | 7 + .../spring/elastic/domain/ElasticCase.java | 10 - .../spring/elastic/domain/ElasticTask.java | 6 - 26 files changed, 500 insertions(+), 104 deletions(-) create mode 100644 application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java create mode 100644 nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonDeserializer.java create mode 100644 nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonSerializer.java diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java index 06f4ac6d0e8..bf5603226c2 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java @@ -1,12 +1,22 @@ package com.netgrif.application.engine.configuration; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; +import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonDeserializer; +import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonSerializer; import com.netgrif.application.engine.workflow.service.CaseEventHandler; +import org.jetbrains.annotations.NotNull; import org.springframework.context.annotation.*; import org.springframework.data.elasticsearch.client.ClientConfiguration; import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories; +import 
org.springframework.data.elasticsearch.support.HttpHeaders; -@Configuration +import java.time.LocalDateTime; +import java.util.List; + + @Configuration @EnableElasticsearchRepositories(excludeFilters = { @ComponentScan.Filter( type = FilterType.REGEX, @@ -46,10 +56,53 @@ public CaseEventHandler caseEventHandler() { return new CaseEventHandler(); } + @NotNull @Override public ClientConfiguration clientConfiguration() { - return ClientConfiguration.builder() - .connectedTo(elasticsearchProperties.getUrl() + ":" + elasticsearchProperties.getSearchPort()) - .build(); + List urls = sanitizeUrls(elasticsearchProperties.getUrl()); + + ClientConfiguration.MaybeSecureClientConfigurationBuilder client = ClientConfiguration.builder() + .connectedTo(urls.toArray(String[]::new)); + ClientConfiguration.TerminalClientConfigurationBuilder clientBuilder = client; + + if (elasticsearchProperties.isSsl()) { + clientBuilder = client.usingSsl(); + } + if (hasCredentials()) { + clientBuilder = clientBuilder.withBasicAuth(elasticsearchProperties.getUsername(), elasticsearchProperties.getPassword()); + } else if (hasToken()) { + clientBuilder.withHeaders(() -> { + HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + elasticsearchProperties.getToken()); + return headers; + }); + } + return clientBuilder.build(); + } + + @Bean(name = "elasticCaseObjectMapper") + public ObjectMapper configureMapper() { + ObjectMapper mapper = new ObjectMapper(); + + JavaTimeModule javaTimeModule = new JavaTimeModule(); + javaTimeModule.addSerializer(LocalDateTime.class, new LocalDateTimeJsonSerializer()); + javaTimeModule.addDeserializer(LocalDateTime.class, new LocalDateTimeJsonDeserializer()); + + mapper.registerModule(javaTimeModule); + mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); + return mapper; + } + + private boolean hasCredentials() { + return elasticsearchProperties.getUsername() != null && !elasticsearchProperties.getUsername().isBlank() 
&& + elasticsearchProperties.getPassword() != null && !elasticsearchProperties.getPassword().isBlank(); + } + + private boolean hasToken() { + return elasticsearchProperties.getToken() != null && !elasticsearchProperties.getToken().isBlank(); + } + + private List sanitizeUrls(List urls) { + return urls.stream().map(u -> u.contains(":") ? u : u + ":" + elasticsearchProperties.getSearchPort()).toList(); } } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java index 36cfbb61506..fff7f31cd9a 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java @@ -1,6 +1,7 @@ package com.netgrif.application.engine.configuration.properties; import jakarta.annotation.PostConstruct; +import jakarta.validation.constraints.Min; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; @@ -14,6 +15,7 @@ import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import javax.validation.Valid; import java.time.Duration; import java.util.*; @@ -155,7 +157,12 @@ public static class ElasticsearchProperties { /** * Hostname for the Elasticsearch server. */ - private String url = "localhost"; + private List url = List.of("localhost"); + + /** + * Indicates if SSL is enabled for Elasticsearch communication. + */ + private boolean ssl = false; /** * Port for connecting to Elasticsearch transport client. @@ -167,6 +174,21 @@ public static class ElasticsearchProperties { */ private int searchPort = 9200; + /** + * The username used for authenticating with the Elasticsearch server. 
+ */ + private String username = null; + + /** + * The password used for authenticating with the Elasticsearch server. + */ + private String password = null; + + /** + * The authentication token for the Elasticsearch server, when using token-based authentication. + */ + private String token = null; + /** * Command to trigger a reindexing job. */ @@ -242,6 +264,15 @@ public static class ElasticsearchProperties { */ private PriorityProperties priority = new PriorityProperties(); + + /** + * Batch-related configuration properties for Elasticsearch operations. + * These properties control the batch size for cases and tasks during + * bulk operations to optimize performance and resource usage. + */ + @Valid + private BatchProperties batch = new BatchProperties(); + public static final String PETRI_NET_INDEX = "petriNet"; public static final String CASE_INDEX = "case"; @@ -334,6 +365,30 @@ public static class PriorityProperties { "visualId.keyword^2" ); } + + /** + * Configuration properties for batch operations in Elasticsearch. + * This class specifies the batch sizes for cases and tasks when performing + * bulk operations like indexing or updating. These values are used to + * control and optimize resource consumption during high-load processes. + */ + @Data + public static class BatchProperties { + + /** + * Default batch size for cases during Elasticsearch bulk operations. + * This value must be at least 1. The default is 5000. + */ + @Min(1) + private int caseBatchSize = 5000; + + /** + * Default batch size for tasks during Elasticsearch bulk operations. + * This value must be at least 1. The default is 20000. 
+ */ + @Min(1) + private int taskBatchSize = 20000; + } } /** diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseMappingService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseMappingService.java index 16abaf36493..51ace92131b 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseMappingService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseMappingService.java @@ -257,11 +257,11 @@ private StringBuilder buildFullName(String name, String surname) { dateField, com.netgrif.application.engine.objects.petrinet.domain.dataset.DateField netField) { if (dateField.getValue() instanceof LocalDate) { LocalDate date = (LocalDate) dateField.getValue(); - return formatDateField(LocalDateTime.of(date, LocalTime.NOON)); + return formatDateField(LocalDateTime.of(date, LocalTime.MIDNIGHT)); } else if (dateField.getValue() instanceof Date) { // log.warn(String.format("DateFields should have LocalDate values! DateField (%s) with Date value found! Value will be converted for indexation.", netField.getImportId())); LocalDateTime transformed = this.transformDateValueField(dateField); - return formatDateField(LocalDateTime.of(transformed.toLocalDate(), LocalTime.NOON)); + return formatDateField(LocalDateTime.of(transformed.toLocalDate(), LocalTime.MIDNIGHT)); } else { // TODO throw error? log.error(String.format("Unsupported DateField value type (%s)! 
Skipping indexation...", dateField.getValue().getClass().getCanonicalName())); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java index caf073aa689..554093aa59e 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java @@ -103,22 +103,22 @@ public void removeByPetriNetId(String processId) { @Override public void index(ElasticCase useCase) { - executors.execute(useCase.getStringId(), () -> { + executors.execute(useCase.getId(), () -> { try { - com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase elasticCase = repository.findByStringId(useCase.getStringId()); + com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase elasticCase = repository.findByStringId(useCase.getId()); if (elasticCase == null) { repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase) useCase); } else { elasticCase.update(useCase); repository.save(elasticCase); } - log.debug("[" + useCase.getStringId() + "]: Case \"" + useCase.getTitle() + "\" indexed"); + log.debug("[" + useCase.getId() + "]: Case \"" + useCase.getTitle() + "\" indexed"); publisher.publishEvent(new IndexCaseEvent(useCase)); } catch (InvalidDataAccessApiUsageException ignored) { - log.debug("[" + useCase.getStringId() + "]: Case \"" + useCase.getTitle() + "\" has duplicates, will be reindexed"); - repository.deleteAllByStringId(useCase.getStringId()); + log.debug("[" + useCase.getId() + "]: Case \"" + useCase.getTitle() + "\" has duplicates, will be reindexed"); + repository.deleteAllByStringId(useCase.getId()); repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase) useCase); - 
log.debug("[" + useCase.getStringId() + "]: Case \"" + useCase.getTitle() + "\" indexed"); + log.debug("[" + useCase.getId() + "]: Case \"" + useCase.getTitle() + "\" indexed"); } }); } @@ -144,7 +144,7 @@ public Page search(List requests, LoggedUser user, Page if (query != null) { SearchHits hits = template.search(query, ElasticCase.class, IndexCoordinates.of(elasticProperties.getIndex().get(DataConfigurationProperties.ElasticsearchProperties.CASE_INDEX))); Page indexedCases = (Page) SearchHitSupport.unwrapSearchHits(SearchHitSupport.searchPageFor(hits, query.getPageable())); - casePage = workflowService.findAllById(indexedCases.get().map(ElasticCase::getStringId).collect(Collectors.toList())); + casePage = workflowService.findAllById(indexedCases.get().map(ElasticCase::getId).collect(Collectors.toList())); total = indexedCases.getTotalElements(); log.debug("Found [{}] total elements of page [{}]", casePage.size(), pageable.getPageNumber()); } else { diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java index 7b1d858add5..7c8d586079f 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java @@ -1,11 +1,19 @@ package com.netgrif.application.engine.elastic.service; +import co.elastic.clients.elasticsearch.ElasticsearchClient; import co.elastic.clients.elasticsearch._types.AcknowledgedResponse; +import co.elastic.clients.elasticsearch.core.BulkRequest; +import co.elastic.clients.elasticsearch.core.BulkResponse; +import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; import co.elastic.clients.elasticsearch.indices.*; import com.fasterxml.jackson.databind.ObjectMapper; import 
com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; +import com.netgrif.application.engine.elastic.service.interfaces.IElasticCaseMappingService; import com.netgrif.application.engine.elastic.service.interfaces.IElasticIndexService; +import com.netgrif.application.engine.elastic.service.interfaces.IElasticTaskMappingService; +import com.netgrif.application.engine.objects.elastic.domain.ElasticCase; +import com.netgrif.application.engine.objects.workflow.domain.Case; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.context.ApplicationContext; @@ -17,14 +25,17 @@ import org.springframework.data.elasticsearch.core.SearchScrollHits; import org.springframework.data.elasticsearch.core.document.Document; import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates; -import org.springframework.data.elasticsearch.core.query.IndexQuery; import org.springframework.data.elasticsearch.core.query.IndexQueryBuilder; import org.springframework.data.elasticsearch.core.query.Query; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.util.CloseableIterator; import org.springframework.stereotype.Service; import org.springframework.util.Assert; import java.io.InputStream; import java.lang.reflect.Field; +import java.time.LocalDateTime; import java.util.*; @Slf4j @@ -35,6 +46,10 @@ public class ElasticIndexService implements IElasticIndexService { private static final String PLACEHOLDERS = "petriNetIndex, caseIndex, taskIndex"; private final ApplicationContext context; private final ElasticsearchTemplate elasticsearchTemplate; + private final ElasticsearchClient elasticsearchClient; + private final MongoTemplate mongoTemplate; + private final IElasticCaseMappingService caseMappingService; + private final IElasticTaskMappingService taskMappingService; private final 
DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties; @Override @@ -54,23 +69,6 @@ public String index(Class clazz, T source, String... placeholders) { .withObject(source).build(), IndexCoordinates.of(indexName)); } - @Override - public boolean bulkIndex(List list, Class clazz, String... placeholders) { - String indexName = getIndexName(clazz, placeholders); - try { - if (list != null && !list.isEmpty()) { - List indexQueries = new ArrayList<>(); - list.forEach(source -> - indexQueries.add(new IndexQueryBuilder().withId(getIdFromSource(source)).withObject(source).build())); - elasticsearchTemplate.bulkIndex(indexQueries, IndexCoordinates.of(indexName)); - } - } catch (Exception e) { - log.error("bulkIndex:", e); - return false; - } - return true; - } - @Override public boolean createIndex(Class clazz, String... placeholders) { try { @@ -289,6 +287,228 @@ public void clearScrollHits(List scrollIds) { } } + /** + * Performs bulk indexing of cases and tasks into Elasticsearch. + * + * @param indexAll if true, indexes all cases and tasks, regardless of modification time + * @param after the time after which cases and tasks should be considered for reindexing + * @param caseBatchSize number of cases to process per batch. If null, defaults from Elasticsearch properties + * @param taskBatchSize number of tasks to process per batch. 
If null, defaults from Elasticsearch properties + */ + @Override + public void bulkIndex(boolean indexAll, LocalDateTime after, Integer caseBatchSize, Integer taskBatchSize) { + log.info("Reindexing stale cases: started reindexing after {}", after); + LocalDateTime now = LocalDateTime.now(); + + if (caseBatchSize == null) { + caseBatchSize = elasticsearchProperties.getBatch().getCaseBatchSize(); + } + if (taskBatchSize == null) { + taskBatchSize = elasticsearchProperties.getBatch().getTaskBatchSize(); + } + + org.springframework.data.mongodb.core.query.Query query; + if (indexAll || after == null) { + query = org.springframework.data.mongodb.core.query.Query.query(Criteria.where("lastModified").lt(now)); + log.info("Reindexing stale cases: force all"); + } else { + query = org.springframework.data.mongodb.core.query.Query.query(Criteria.where("lastModified").lt(now).gt(after.minusMinutes(2))); + } + + long count = mongoTemplate.count(query, Case.class); + if (count > 0) { + reindexQueried(query, count, caseBatchSize, taskBatchSize); + } + log.info("Reindexing stale cases: end"); + } + + /** + * Reindexes queried cases and tasks into Elasticsearch in batches. 
+ * + * @param count total number of cases to reindex + * @param caseBatchSize batch size for cases + * @param taskBatchSize batch size for tasks + */ + private void reindexQueried(org.springframework.data.mongodb.core.query.Query query, long count, int caseBatchSize, int taskBatchSize) { + long numOfPages = ((count / caseBatchSize) + 1); + log.info("Reindexing {} pages", numOfPages); + + query.cursorBatchSize(caseBatchSize); + long page = 1, currentBatchSize = 0; + List caseOperations = new ArrayList<>(); + List caseIds = new ArrayList<>(); + + try (CloseableIterator cursor = mongoTemplate.stream(query, Case.class)) { + while (cursor.hasNext()) { + Case aCase = cursor.next(); + prepareCase(aCase); + ElasticCase doc = caseMappingService.transform(aCase); + prepareCaseBulkOperation(doc, caseOperations); + caseIds.add(aCase.getStringId()); + + if (++currentBatchSize == caseBatchSize || !cursor.hasNext()) { + log.info("Reindexing case page {} / {}", page, numOfPages); + executeAndValidate(caseOperations); + bulkIndexTasks(caseIds, taskBatchSize); + caseOperations.clear(); + caseIds.clear(); + currentBatchSize = 0; + page++; + } + } + } + } + + /** + * Reindexes tasks into Elasticsearch in batches corresponding to the provided case IDs. 
+ * + * @param caseIds list of case IDs whose tasks need to be reindexed + * @param taskBatchSize size of the batch for tasks + */ + private void bulkIndexTasks(List caseIds, int taskBatchSize) { + if (caseIds == null || caseIds.isEmpty()) { + return; + } + org.springframework.data.mongodb.core.query.Query query = org.springframework.data.mongodb.core.query.Query.query(Criteria.where("caseId").in(caseIds)).cursorBatchSize(taskBatchSize); + long totalSize = mongoTemplate.count(query, Task.class); + long numOfPages = ((totalSize / taskBatchSize) + 1); + + long page = 1, currentBatchSize = 0; + List taskOperations = new ArrayList<>(); + + try (CloseableIterator cursor = mongoTemplate.stream(query, Task.class)) { + while (cursor.hasNext()) { + Task task = cursor.next(); + ElasticTask elasticTask = taskMappingService.transform(task); + prepareTaskBulkOperation(elasticTask, taskOperations); + + if (++currentBatchSize == taskBatchSize || !cursor.hasNext()) { + log.info("Reindexing task page {} / {}", page, numOfPages); + executeAndValidate(taskOperations); + taskOperations.clear(); + currentBatchSize = 0; + page++; + } + } + } + } + + /** + * Prepares the case object by ensuring necessary dependencies and last modified timestamp are set. + * + * @param useCase case object to prepare + */ + private void prepareCase(Case useCase) { + if (useCase.getPetriNet() == null) { + useCase.setPetriNet(petriNetService.get(useCase.getPetriNetObjectId())); + } + if (useCase.getLastModified() == null) { + useCase.setLastModified(LocalDateTime.now()); + } + } + + /** + * Prepares a bulk operation for indexing or updating a case in Elasticsearch. 
+ * + * @param doc transformed ElasticCase object + * @param operations collection of BulkOperations to add this operation to + */ + private void prepareCaseBulkOperation(ElasticCase doc, List operations) { + try { + operations.add(BulkOperation.of(op -> op + .update(u -> u + .index(elasticsearchProperties.getIndex().get("case")) + .id(doc.getStringId()) + .action(a -> a + .doc(doc) + .docAsUpsert(true) + ) + ))); + } catch (Exception e) { + log.error("Failed to prepare bulk operation for case [{}]: {}", doc.getStringId(), e.getMessage()); + } + } + + /** + * Prepares a bulk operation for indexing or updating a task in Elasticsearch. + * + * @param doc transformed ElasticTask object + * @param operations collection of BulkOperations to add this operation to + */ + private void prepareTaskBulkOperation(ElasticTask doc, List operations) { + try { + operations.add(BulkOperation.of(op -> op + .update(u -> u + .index(elasticsearchProperties.getIndex().get("task")) + .id(doc.getStringId()) + .action(a -> a + .doc(doc) + .docAsUpsert(true) + ) + )) + ); + } catch (Exception e) { + log.error("Failed to prepare bulk operation for task [{}]: {}", doc.getStringId(), e.getMessage()); + } + } + + /** + * Executes the bulk operations and validates the results, retrying on partial failures. + * + * @param operations list of bulk operations to execute + */ + private void executeAndValidate(List operations) { + if (operations.isEmpty()) { + return; + } + + BulkRequest.Builder builder = new BulkRequest.Builder(); + builder.operations(operations); + + try { + BulkResponse response = elasticsearchClient.bulk(builder.build()); + checkForBulkUpdateFailure(response); + log.info("Batch indexed successfully with {} ops", operations.size()); + } catch (ElasticsearchException e) { + log.warn("Failed for {} ops to index bulk {}", operations.size(), e.getMessage(), e); + + if (operations.size() == 1) { + log.error("Single operation failed. Skipping. 
{}", operations.get(0), e); + return; + } + + log.warn("Dividing the requirement."); + + int mid = operations.size() / 2; + List left = operations.subList(0, mid); + List right = operations.subList(mid, operations.size()); + + executeAndValidate(new ArrayList<>(left)); + executeAndValidate(new ArrayList<>(right)); + } catch (Exception e) { + log.error("Failed to index bulk: {}", e.getMessage(), e); + } + } + + /** + * Checks the results of a bulk indexing operation for failures. + * + * @param response the BulkResponse from Elasticsearch + * @throws ElasticsearchException if there are failures in the bulk response + */ + private void checkForBulkUpdateFailure(BulkResponse response) { + Map failedDocuments = new HashMap<>(); + response.items().forEach(item -> { + if (item.error() != null) { + failedDocuments.put(item.id(), item.error().reason()); + } + }); + + if (!failedDocuments.isEmpty()) { + throw new ElasticsearchException("Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for details [{}]", failedDocuments); + } + } + private String getIdFromSource(Object source) { if (source == null) { return null; diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index 0f456534b64..b8676f06b1a 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -116,17 +116,17 @@ private ElasticTask indexTaskWorker(ElasticTask task) { log.debug("Indexing task [{}] in thread [{}]", task.getTaskId(), Thread.currentThread().getName()); com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask elasticTask = null; try { - elasticTask = repository.findByStringId(task.getStringId()); + 
elasticTask = repository.findByStringId(task.getId()); if (elasticTask == null) { elasticTask = repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); } else { elasticTask.update(task); elasticTask = repository.save(elasticTask); } - log.debug("[{}]: Task \"{}\" [{}] indexed", task.getCaseId(), task.getTitle(), task.getStringId()); + log.debug("[{}]: Task \"{}\" [{}] indexed", task.getCaseId(), task.getTitle(), task.getId()); } catch (InvalidDataAccessApiUsageException e) { log.debug("[{}]: Task \"{}\" has duplicates, will be reindexed", task.getCaseId(), task.getTitle()); - repository.deleteAllByStringId(task.getStringId()); + repository.deleteAllByStringId(task.getId()); repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); log.debug("[{}]: Task \"{}\" indexed", task.getCaseId(), task.getTitle()); } catch (RuntimeException e) { diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskService.java index e89ba841eb8..7bc7f0f4cc3 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskService.java @@ -150,7 +150,7 @@ public Page search(List requests, LoggedUser use if (query != null) { SearchHits hits = elasticsearchTemplate.search(query, com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask.class, IndexCoordinates.of(elasticsearchProperties.getIndex().get(DataConfigurationProperties.ElasticsearchProperties.TASK_INDEX))); Page indexedTasks = (Page) SearchHitSupport.unwrapSearchHits(SearchHitSupport.searchPageFor(hits, query.getPageable())); - taskPage = taskService.findAllById(indexedTasks.get().map(ElasticTask::getStringId).collect(Collectors.toList())); + 
taskPage = taskService.findAllById(indexedTasks.get().map(ElasticTask::getId).collect(Collectors.toList())); total = indexedTasks.getTotalElements(); } else { taskPage = Collections.emptyList(); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java index 2231a2ae1c3..c25a5080f6f 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java @@ -2,10 +2,7 @@ import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; import com.netgrif.application.engine.elastic.domain.ElasticCaseRepository; -import com.netgrif.application.engine.elastic.service.interfaces.IElasticCaseMappingService; -import com.netgrif.application.engine.elastic.service.interfaces.IElasticCaseService; -import com.netgrif.application.engine.elastic.service.interfaces.IElasticTaskMappingService; -import com.netgrif.application.engine.elastic.service.interfaces.IElasticTaskService; +import com.netgrif.application.engine.elastic.service.interfaces.*; import com.netgrif.application.engine.objects.workflow.domain.Case; import com.netgrif.application.engine.adapter.spring.workflow.domain.QCase; import com.netgrif.application.engine.objects.workflow.domain.Task; @@ -37,7 +34,6 @@ public class ReindexingTask { private static final Logger log = LoggerFactory.getLogger(ReindexingTask.class); private int pageSize; - private CaseRepository caseRepository; private TaskRepository taskRepository; private ElasticCaseRepository elasticCaseRepository; private IElasticCaseService elasticCaseService; @@ -47,10 +43,10 @@ public class ReindexingTask { private IWorkflowService workflowService; private DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties; 
private LocalDateTime lastRun; + private IElasticIndexService elasticIndexService; @Autowired public ReindexingTask( - CaseRepository caseRepository, TaskRepository taskRepository, ElasticCaseRepository elasticCaseRepository, @Qualifier("reindexingTaskElasticCaseService") @@ -60,8 +56,8 @@ public ReindexingTask( IElasticCaseMappingService caseMappingService, IElasticTaskMappingService taskMappingService, IWorkflowService workflowService, - DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties) { - this.caseRepository = caseRepository; + DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties, + IElasticIndexService elasticIndexService) { this.taskRepository = taskRepository; this.elasticCaseRepository = elasticCaseRepository; this.elasticCaseService = elasticCaseService; @@ -71,6 +67,7 @@ public ReindexingTask( this.workflowService = workflowService; this.elasticsearchProperties = elasticsearchProperties; this.pageSize = elasticsearchProperties.getReindexExecutor().getSize(); + this.elasticIndexService = elasticIndexService; lastRun = LocalDateTime.now(); if (this.elasticsearchProperties.getReindexFrom() != null) { @@ -82,26 +79,11 @@ public ReindexingTask( public void reindex() { log.info("Reindexing stale cases: started reindexing after " + lastRun); - BooleanExpression predicate = QCase.case$.lastModified.before(LocalDateTime.now()).and(QCase.case$.lastModified.after(lastRun.minusMinutes(2))); - - lastRun = LocalDateTime.now(); - long count = caseRepository.count(predicate); - if (count > 0) { - reindexAllPages(predicate, count); - } + elasticIndexService.bulkIndex(false, lastRun, null, null); log.info("Reindexing stale cases: end"); } - private void reindexAllPages(BooleanExpression predicate, long count) { - long numOfPages = ((count / pageSize) + 1); - log.info("Reindexing " + numOfPages + " pages"); - - for (int page = 0; page < numOfPages; page++) { - reindexPage(predicate, page, numOfPages, false); - } - } - public 
void forceReindexPage(Predicate predicate, int page, long numOfPages) { reindexPage(predicate, page, numOfPages, true); } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/interfaces/IElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/interfaces/IElasticIndexService.java index bb9df30e35d..d58a7882a58 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/interfaces/IElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/interfaces/IElasticIndexService.java @@ -5,6 +5,7 @@ import org.springframework.data.elasticsearch.core.document.Document; import org.springframework.data.elasticsearch.core.query.Query; +import java.time.LocalDateTime; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -13,7 +14,7 @@ public interface IElasticIndexService { boolean indexExists(String indexName); - boolean bulkIndex(List list, Class clazz, String... placeholders); + void bulkIndex(boolean indexAll, LocalDateTime lastRun, Integer caseBatchSize, Integer taskBatchSize); boolean createIndex(Class clazz, String... 
placeholders); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java index fdd946aace5..809c0e3efea 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java @@ -1,6 +1,8 @@ package com.netgrif.application.engine.elastic.web; import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; +import com.netgrif.application.engine.elastic.service.interfaces.IElasticIndexService; +import com.netgrif.application.engine.elastic.web.requestbodies.IndexParams; import com.netgrif.application.engine.objects.auth.domain.LoggedUser; import com.netgrif.application.engine.elastic.service.ReindexingTask; import com.netgrif.application.engine.workflow.service.CaseSearchService; @@ -51,6 +53,8 @@ public class ElasticController { private DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties; + private IElasticIndexService indexService; + @Autowired public void setWorkflowService(IWorkflowService workflowService) { this.workflowService = workflowService; @@ -71,6 +75,11 @@ public void setElasticsearchProperties(DataConfigurationProperties.Elasticsearch this.elasticsearchProperties = elasticsearchProperties; } + @Autowired + public void setIndexService(IElasticIndexService indexService) { + this.indexService = indexService; + } + @PreAuthorize("@authorizationService.hasAuthority('ADMIN')") @Operation(summary = "Reindex specified cases", description = "Caller must have the ADMIN role", @@ -88,11 +97,11 @@ public MessageResource reindex(@RequestBody Map searchBody, Auth if (count == 0) { log.info("No cases to reindex"); } else { - long numOfPages = (long) ((count / elasticsearchProperties.getReindexExecutor().getSize()) + 1); 
+ long numOfPages = (count / elasticsearchProperties.getReindexExecutor().getSize()) + 1; log.info("Reindexing cases: " + numOfPages + " pages"); for (int page = 0; page < numOfPages; page++) { - log.info("Indexing page " + (page + 1)); + log.info("Indexing page {}", page + 1); Predicate predicate = searchService.buildQuery(searchBody, user, locale); reindexingTask.forceReindexPage(predicate, page, numOfPages); } @@ -104,4 +113,23 @@ public MessageResource reindex(@RequestBody Map searchBody, Auth return MessageResource.errorMessage(e.getMessage()); } } + + @PreAuthorize("@authorizationService.hasAuthority('ADMIN')") + @Operation(summary = "Reindex all or stale cases with bulk index", + description = "Reindex all or stale cases (specified by IndexParams.indexAll param) with bulk index. Caller must have the ADMIN role", + security = {@SecurityRequirement(name = "BasicAuth")}) + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "OK"), + @ApiResponse(responseCode = "403", description = "Caller doesn't fulfill the authorisation requirements"), + }) + @PostMapping(value = "/reindex/bulk", produces = MediaType.APPLICATION_JSON_VALUE) + public MessageResource bulkReindex(IndexParams indexParams) { + try { + indexService.bulkIndex(indexParams.isIndexAll(), null, indexParams.getCaseBatchSize(), indexParams.getTaskBatchSize()); + return MessageResource.successMessage("Success"); + } catch (Exception e) { + log.error("Could not index: ", e); + return MessageResource.errorMessage(e.getMessage()); + } + } } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java new file mode 100644 index 00000000000..17710bbe22f --- /dev/null +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java @@ -0,0 +1,27 @@ +package 
com.netgrif.application.engine.elastic.web.requestbodies; + +import lombok.Data; + +/** + * Represents the parameters to configure the indexing operation. + * This class allows customization of batch sizes for cases and tasks, + * as well as the option to index all data. + */ +@Data +public class IndexParams { + + /** + * Determines whether to index all available data. Default is {@code false}. + */ + private boolean indexAll = false; + + /** + * Specifies the batch size for cases during indexing. Default is {@code 5000}. + */ + private Integer caseBatchSize = 5000; + + /** + * Specifies the batch size for tasks during indexing. Default is {@code 20000}. + */ + private Integer taskBatchSize = 20000; +} diff --git a/application-engine/src/main/java/com/netgrif/application/engine/workflow/web/responsebodies/Task.java b/application-engine/src/main/java/com/netgrif/application/engine/workflow/web/responsebodies/Task.java index 0731ed7923c..b226b7e8895 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/workflow/web/responsebodies/Task.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/workflow/web/responsebodies/Task.java @@ -108,7 +108,7 @@ public Task(com.netgrif.application.engine.objects.workflow.domain.Task task, Lo } public Task(ElasticTask entity) { - _id = new ProcessResourceId(entity.getStringId()); + _id = new ProcessResourceId(entity.getId()); caseId = entity.getCaseId(); transitionId = entity.getTransitionId(); title = entity.getTitle().getDefaultValue(); diff --git a/application-engine/src/main/resources/petriNets/engine-processes/menu/menu_item.xml b/application-engine/src/main/resources/petriNets/engine-processes/menu/menu_item.xml index 3df40f6a0a8..74dee709931 100644 --- a/application-engine/src/main/resources/petriNets/engine-processes/menu/menu_item.xml +++ b/application-engine/src/main/resources/petriNets/engine-processes/menu/menu_item.xml @@ -112,7 +112,7 @@ { def parentId -> - String query = 
String.format("processIdentifier:menu_item AND dataSet.parentId.textValue:(%s) AND dataSet.is_auto_select.booleanValue:true AND NOT stringId:%s", + String query = String.format("processIdentifier:menu_item AND dataSet.parentId.textValue:(%s) AND dataSet.is_auto_select.booleanValue:true AND NOT id:%s", parentId, useCase.stringId) def itemCase = findCaseElastic(query) if (itemCase == null) { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/DataSearchRequestTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/DataSearchRequestTest.groovy index 8c27b085a73..0f88aadaf8a 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/DataSearchRequestTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/DataSearchRequestTest.groovy @@ -148,7 +148,7 @@ class DataSearchRequestTest { new AbstractMap.SimpleEntry("user.usernameValue.keyword" as String, "${testUser1.username}" as String), new AbstractMap.SimpleEntry("user.fullNameValue.keyword" as String, "${testUser1.name}" as String), new AbstractMap.SimpleEntry("user.userIdValue" as String, "${testUser1.getStringId()}" as String), - new AbstractMap.SimpleEntry("date.timestampValue" as String, "${Timestamp.valueOf(LocalDateTime.of(date, LocalTime.NOON)).getTime()}" as String), + new AbstractMap.SimpleEntry("date.timestampValue" as String, "${Timestamp.valueOf(LocalDateTime.of(date, LocalTime.MIDNIGHT)).getTime()}" as String), new AbstractMap.SimpleEntry("datetime.timestampValue" as String, "${Timestamp.valueOf(date.atTime(13, 37)).getTime()}" as String), new AbstractMap.SimpleEntry("enumeration" as String, "Alice" as String), new AbstractMap.SimpleEntry("enumeration" as String, "Alica" as String), diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ReindexTest.groovy 
b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ReindexTest.groovy index cf9cd55db80..12d4e038789 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ReindexTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ReindexTest.groovy @@ -84,7 +84,7 @@ class ReindexTest { savedCase.forEach(it -> { CaseSearchRequest request = new CaseSearchRequest() - request.query = "stringId:\"" + it.getStringId() + "\"" + request.query = "id:\"" + it.getStringId() + "\"" List result = elasticCaseService.search(Collections.singletonList(request), superCreator.getLoggedSuper(), PageRequest.of(0, 10), LocaleContextHolder.getLocale(), false).getContent() assert result.size() == 1 }) diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/filters/FilterImportExportTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/filters/FilterImportExportTest.groovy index 48a77df7224..81383bc8c45 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/filters/FilterImportExportTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/filters/FilterImportExportTest.groovy @@ -338,7 +338,7 @@ class FilterImportExportTest { "((creationDateSortable:[1631138400000 TO 1631224800000}) OR (creationDateSortable:[1631138400000 TO 1631311200000})) AND " + "((creationDateSortable:[1631184360000 TO 1631184420000}) OR (creationDateSortable:[1631184360000 TO 1631270820000})) AND " + "(processIdentifier:6139e51308215f25b0a498c2_all_data) AND ((taskIds:1) AND (processIdentifier:6139e51308215f25b0a498c2_all_data)) AND " + - "((author:<>) OR (!(author:7))) AND (visualId:*asdad*) AND (stringId:*asdasd*))", ["all_data", "test_net"], + "((author:<>) OR (!(author:7))) AND (visualId:*asdad*) AND (id:*asdasd*))", ["all_data", "test_net"], ["predicateMetadata": [ [[ "category" : "case_dataset", diff --git 
a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/CaseField.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/CaseField.java index b744625a33f..48292624f38 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/CaseField.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/CaseField.java @@ -5,6 +5,7 @@ import lombok.NoArgsConstructor; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; @Data @@ -12,8 +13,11 @@ @EqualsAndHashCode(callSuper = true) public abstract class CaseField extends FieldWithAllowedNetsField { + private List caseValue; + public CaseField(String[] fullTextValue, String[] allowedNets) { super(fullTextValue, allowedNets); + this.caseValue = Arrays.asList(fullTextValue); } @Override diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java index 011fbc43e61..682b0d3dd11 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java @@ -1,22 +1,17 @@ package com.netgrif.application.engine.objects.elastic.domain; - -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer; -import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer; import com.netgrif.application.engine.objects.petrinet.domain.dataset.ImmediateField; import com.netgrif.application.engine.objects.workflow.domain.Case; import com.netgrif.application.engine.objects.workflow.domain.TaskPair; import 
lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import lombok.Setter; import java.io.Serial; import java.io.Serializable; import java.sql.Timestamp; import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; import java.util.*; import java.util.stream.Collectors; @@ -35,8 +30,6 @@ public abstract class ElasticCase implements Serializable { private Long lastModified; - private String stringId; - private String visualId; private String processIdentifier; @@ -45,8 +38,6 @@ public abstract class ElasticCase implements Serializable { private String title; - @JsonSerialize(using = LocalDateTimeSerializer.class) - @JsonDeserialize(using = LocalDateTimeDeserializer.class) private LocalDateTime creationDate; private Long creationDateSortable; @@ -55,8 +46,6 @@ public abstract class ElasticCase implements Serializable { private String authorRealm; - private String mongoId; - private String authorName; private String authorUsername; @@ -91,14 +80,13 @@ public abstract class ElasticCase implements Serializable { public ElasticCase(Case useCase) { - stringId = useCase.getStringId(); - mongoId = useCase.getStringId(); //TODO: Duplication + id = useCase.getStringId(); lastModified = Timestamp.valueOf(useCase.getLastModified()).getTime(); processIdentifier = useCase.getProcessIdentifier(); processId = useCase.getPetriNetId(); visualId = useCase.getVisualId(); title = useCase.getTitle(); - creationDate = useCase.getCreationDate(); + creationDate = useCase.getCreationDate().truncatedTo(ChronoUnit.MILLIS); creationDateSortable = Timestamp.valueOf(useCase.getCreationDate()).getTime(); author = useCase.getAuthor().getId(); authorRealm = useCase.getAuthor().getRealmId(); diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java index acd6d098459..c1fec55b3da 100644 --- 
a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java @@ -21,8 +21,6 @@ public abstract class ElasticTask { private String id; - private String stringId; - private String processId; private String caseId; @@ -80,7 +78,7 @@ public abstract class ElasticTask { private Map tags; public ElasticTask(Task task) { - this.stringId = task.getStringId(); + this.id = task.getStringId(); this.processId = task.getProcessId(); this.taskId = task.getStringId(); this.caseId = task.getCaseId(); diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonDeserializer.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonDeserializer.java new file mode 100644 index 00000000000..af1a583de34 --- /dev/null +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonDeserializer.java @@ -0,0 +1,31 @@ +package com.netgrif.application.engine.objects.elastic.serializer; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; + +import java.io.IOException; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.temporal.ChronoField; + +public class LocalDateTimeJsonDeserializer extends JsonDeserializer { + + private static final DateTimeFormatter FORMATTER = new DateTimeFormatterBuilder() + .appendPattern("yyyy-MM-dd'T'HH:mm:ss") + .optionalStart() + .appendFraction(ChronoField.MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .toFormatter(); + + @Override + public LocalDateTime deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + String 
value = p.getValueAsString(); + if (value == null || value.isEmpty()) { + return null; + } + return LocalDateTime.parse(value, FORMATTER); + } +} + diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonSerializer.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonSerializer.java new file mode 100644 index 00000000000..840042c4925 --- /dev/null +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/serializer/LocalDateTimeJsonSerializer.java @@ -0,0 +1,18 @@ +package com.netgrif.application.engine.objects.elastic.serializer; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; + +import java.io.IOException; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; + +public class LocalDateTimeJsonSerializer extends JsonSerializer { + private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"); + + @Override + public void serialize(LocalDateTime value, JsonGenerator gen, SerializerProvider serializers) throws IOException { + gen.writeString(FORMATTER.format(value)); + } +} diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/task/IndexTaskEvent.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/task/IndexTaskEvent.java index 555ae66c7ea..7a23e284886 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/task/IndexTaskEvent.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/task/IndexTaskEvent.java @@ -15,6 +15,6 @@ public IndexTaskEvent(ElasticTask task) { } public String getMessage() { - return "IndexTaskEvent: Task [" + task.getStringId() + "] indexed"; + 
return "IndexTaskEvent: Task [" + task.getId() + "] indexed"; } } diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/workflow/IndexCaseEvent.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/workflow/IndexCaseEvent.java index e699bcbe3c0..92be4643885 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/workflow/IndexCaseEvent.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/event/events/workflow/IndexCaseEvent.java @@ -16,6 +16,6 @@ public IndexCaseEvent(ElasticCase elasticCase) { @Override public String getMessage() { - return "IndexCaseEvent: Case [" + elasticCase.getStringId() + "] indexed"; + return "IndexCaseEvent: Case [" + elasticCase.getId() + "] indexed"; } } diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/CaseField.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/CaseField.java index a91aca3d068..7596000d6f1 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/CaseField.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/CaseField.java @@ -3,6 +3,8 @@ import lombok.NoArgsConstructor; import org.springframework.data.elasticsearch.annotations.Field; +import java.util.List; + import static org.springframework.data.elasticsearch.annotations.FieldType.Text; @NoArgsConstructor @@ -23,4 +25,9 @@ public String[] getAllowedNets() { return super.allowedNets; } + @Override + @Field(type = Text) + public List getCaseValue() { + return super.getCaseValue(); + } } diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java 
b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java index c70f335db0b..7ee9749d94d 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java @@ -38,11 +38,6 @@ public Long getVersion() { return super.getVersion(); } - @Field(type = Keyword) - public String getStringId() { - return super.getStringId(); - } - @Field(type = Keyword) public String getProcessIdentifier() { return super.getProcessIdentifier(); @@ -68,11 +63,6 @@ public String getAuthorRealm() { return super.getAuthorRealm(); } - @Field(type = Keyword) - public String getMongoId() { - return super.getMongoId(); - } - @Field(type = Keyword) public String getAuthorName() { return super.getAuthorName(); diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java index ca1a5c7dbdc..956e0469909 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java @@ -34,12 +34,6 @@ public String getId() { return super.getId(); } - @Field(type = Keyword) - @Override - public String getStringId() { - return super.getStringId(); - } - @Field(type = Keyword) @Override public String getProcessId() { From ef45e764156e08290a10b6f06966d0c31fd69bab Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 13:03:02 +0200 Subject: [PATCH 02/17] Refactor cursor handling and improve error reporting. 
Replaced try-with-resources for CloseableIterator with direct Iterator usage for reindexing cases and tasks. Enhanced error reporting in bulk indexing by including detailed failure messages. Minor adjustments to method calls for improved clarity and consistency. --- .../elastic/service/ElasticIndexService.java | 84 ++++++++++--------- 1 file changed, 45 insertions(+), 39 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java index 7c8d586079f..112b4b7b317 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java @@ -1,8 +1,10 @@ package com.netgrif.application.engine.elastic.service; - import co.elastic.clients.elasticsearch.ElasticsearchClient; import co.elastic.clients.elasticsearch._types.AcknowledgedResponse; +import co.elastic.clients.elasticsearch._types.ElasticsearchException; +import co.elastic.clients.elasticsearch._types.ErrorCause; +import co.elastic.clients.elasticsearch._types.ErrorResponse; import co.elastic.clients.elasticsearch.core.BulkRequest; import co.elastic.clients.elasticsearch.core.BulkResponse; import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; @@ -13,7 +15,10 @@ import com.netgrif.application.engine.elastic.service.interfaces.IElasticIndexService; import com.netgrif.application.engine.elastic.service.interfaces.IElasticTaskMappingService; import com.netgrif.application.engine.objects.elastic.domain.ElasticCase; +import com.netgrif.application.engine.objects.elastic.domain.ElasticTask; import com.netgrif.application.engine.objects.workflow.domain.Case; +import com.netgrif.application.engine.objects.workflow.domain.Task; +import 
com.netgrif.application.engine.petrinet.service.interfaces.IPetriNetService; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.context.ApplicationContext; @@ -29,7 +34,6 @@ import org.springframework.data.elasticsearch.core.query.Query; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.query.Criteria; -import org.springframework.data.util.CloseableIterator; import org.springframework.stereotype.Service; import org.springframework.util.Assert; @@ -50,6 +54,7 @@ public class ElasticIndexService implements IElasticIndexService { private final MongoTemplate mongoTemplate; private final IElasticCaseMappingService caseMappingService; private final IElasticTaskMappingService taskMappingService; + private final IPetriNetService petriNetService; private final DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties; @Override @@ -337,26 +342,26 @@ private void reindexQueried(org.springframework.data.mongodb.core.query.Query qu long page = 1, currentBatchSize = 0; List caseOperations = new ArrayList<>(); List caseIds = new ArrayList<>(); - - try (CloseableIterator cursor = mongoTemplate.stream(query, Case.class)) { - while (cursor.hasNext()) { - Case aCase = cursor.next(); - prepareCase(aCase); - ElasticCase doc = caseMappingService.transform(aCase); - prepareCaseBulkOperation(doc, caseOperations); - caseIds.add(aCase.getStringId()); - - if (++currentBatchSize == caseBatchSize || !cursor.hasNext()) { - log.info("Reindexing case page {} / {}", page, numOfPages); - executeAndValidate(caseOperations); - bulkIndexTasks(caseIds, taskBatchSize); - caseOperations.clear(); - caseIds.clear(); - currentBatchSize = 0; - page++; - } + Iterator cursor = mongoTemplate.stream(query, Case.class).iterator(); + + while (cursor.hasNext()) { + Case aCase = cursor.next(); + prepareCase(aCase); + ElasticCase doc = caseMappingService.transform(aCase); + 
prepareCaseBulkOperation(doc, caseOperations); + caseIds.add(aCase.getStringId()); + + if (++currentBatchSize == caseBatchSize || !cursor.hasNext()) { + log.info("Reindexing case page {} / {}", page, numOfPages); + executeAndValidate(caseOperations); + bulkIndexTasks(caseIds, taskBatchSize); + caseOperations.clear(); + caseIds.clear(); + currentBatchSize = 0; + page++; } } + } /** @@ -375,20 +380,19 @@ private void bulkIndexTasks(List caseIds, int taskBatchSize) { long page = 1, currentBatchSize = 0; List taskOperations = new ArrayList<>(); - - try (CloseableIterator cursor = mongoTemplate.stream(query, Task.class)) { - while (cursor.hasNext()) { - Task task = cursor.next(); - ElasticTask elasticTask = taskMappingService.transform(task); - prepareTaskBulkOperation(elasticTask, taskOperations); - - if (++currentBatchSize == taskBatchSize || !cursor.hasNext()) { - log.info("Reindexing task page {} / {}", page, numOfPages); - executeAndValidate(taskOperations); - taskOperations.clear(); - currentBatchSize = 0; - page++; - } + Iterator cursor = mongoTemplate.stream(query, Task.class).iterator(); + + while (cursor.hasNext()) { + Task task = cursor.next(); + ElasticTask elasticTask = taskMappingService.transform(task); + prepareTaskBulkOperation(elasticTask, taskOperations); + + if (++currentBatchSize == taskBatchSize || !cursor.hasNext()) { + log.info("Reindexing task page {} / {}", page, numOfPages); + executeAndValidate(taskOperations); + taskOperations.clear(); + currentBatchSize = 0; + page++; } } } @@ -418,14 +422,14 @@ private void prepareCaseBulkOperation(ElasticCase doc, List opera operations.add(BulkOperation.of(op -> op .update(u -> u .index(elasticsearchProperties.getIndex().get("case")) - .id(doc.getStringId()) + .id(doc.getId()) .action(a -> a .doc(doc) .docAsUpsert(true) ) ))); } catch (Exception e) { - log.error("Failed to prepare bulk operation for case [{}]: {}", doc.getStringId(), e.getMessage()); + log.error("Failed to prepare bulk operation for case 
[{}]: {}", doc.getId(), e.getMessage()); } } @@ -440,7 +444,7 @@ private void prepareTaskBulkOperation(ElasticTask doc, List opera operations.add(BulkOperation.of(op -> op .update(u -> u .index(elasticsearchProperties.getIndex().get("task")) - .id(doc.getStringId()) + .id(doc.getId()) .action(a -> a .doc(doc) .docAsUpsert(true) @@ -448,7 +452,7 @@ private void prepareTaskBulkOperation(ElasticTask doc, List opera )) ); } catch (Exception e) { - log.error("Failed to prepare bulk operation for task [{}]: {}", doc.getStringId(), e.getMessage()); + log.error("Failed to prepare bulk operation for task [{}]: {}", doc.getId(), e.getMessage()); } } @@ -505,7 +509,9 @@ private void checkForBulkUpdateFailure(BulkResponse response) { }); if (!failedDocuments.isEmpty()) { - throw new ElasticsearchException("Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for details [{}]", failedDocuments); + String message = "Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for details [" + failedDocuments.values() + "]"; + throw new ElasticsearchException(message, + ErrorResponse.of(builder -> builder.error(ErrorCause.of(errorCauseBuilder -> errorCauseBuilder.reason(message))))); } } From 97254f055ceaf5609109e66e79dba5620b62803e Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 13:37:41 +0200 Subject: [PATCH 03/17] Refactor repository methods to replace deprecated identifiers Updated repository methods to use `id` instead of `stringId` for consistency and clarity. Adjusted associated service and task management logic to ensure compatibility with the updated repository interface. 
--- .../engine/elastic/domain/ElasticCaseRepository.java | 6 ++---- .../engine/elastic/domain/ElasticTaskRepository.java | 4 +--- .../engine/elastic/service/ElasticCaseService.java | 11 +++++------ .../engine/elastic/service/ElasticIndexService.java | 4 +++- .../elastic/service/ElasticTaskQueueManager.java | 11 ++++++----- .../engine/elastic/service/ReindexingTask.java | 7 +------ 6 files changed, 18 insertions(+), 25 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticCaseRepository.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticCaseRepository.java index fa694a15a5e..4541ee225eb 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticCaseRepository.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticCaseRepository.java @@ -7,11 +7,9 @@ @Repository public interface ElasticCaseRepository extends ElasticsearchRepository { - ElasticCase findByStringId(String stringId); + long countByIdAndLastModified(String stringId, long lastUpdated); - long countByStringIdAndLastModified(String stringId, long lastUpdated); - - void deleteAllByStringId(String id); + void deleteAllById(String id); void deleteAllByProcessId(String processId); } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticTaskRepository.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticTaskRepository.java index 86027e99fdc..b3980d0bc54 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticTaskRepository.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticTaskRepository.java @@ -9,11 +9,9 @@ @Repository public interface ElasticTaskRepository extends ElasticsearchRepository { - ElasticTask findByStringId(String stringId); - ElasticTask 
findByTaskId(String taskId); - void deleteAllByStringId(String taskId); + void deleteAllById(String taskId); ElasticTask deleteAllByTaskId(String taskId); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java index 554093aa59e..dbc21404678 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticCaseService.java @@ -22,8 +22,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.actuate.autoconfigure.metrics.export.elastic.ElasticProperties; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.annotation.Lazy; import org.springframework.dao.InvalidDataAccessApiUsageException; @@ -88,7 +86,7 @@ public void setElasticProperties(DataConfigurationProperties.ElasticsearchProper @Override public void remove(String caseId) { executors.execute(caseId, () -> { - repository.deleteAllByStringId(caseId); + repository.deleteAllById(caseId); log.info("[" + caseId + "]: Case \"" + caseId + "\" deleted"); }); } @@ -105,10 +103,11 @@ public void removeByPetriNetId(String processId) { public void index(ElasticCase useCase) { executors.execute(useCase.getId(), () -> { try { - com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase elasticCase = repository.findByStringId(useCase.getId()); - if (elasticCase == null) { + Optional elasticCaseOptional = repository.findById(useCase.getId()); + if (elasticCaseOptional.isEmpty()) { repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase) useCase); } else { + 
com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase elasticCase = elasticCaseOptional.get(); elasticCase.update(useCase); repository.save(elasticCase); } @@ -116,7 +115,7 @@ public void index(ElasticCase useCase) { publisher.publishEvent(new IndexCaseEvent(useCase)); } catch (InvalidDataAccessApiUsageException ignored) { log.debug("[" + useCase.getId() + "]: Case \"" + useCase.getTitle() + "\" has duplicates, will be reindexed"); - repository.deleteAllByStringId(useCase.getId()); + repository.deleteAllById(useCase.getId()); repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticCase) useCase); log.debug("[" + useCase.getId() + "]: Case \"" + useCase.getTitle() + "\" indexed"); } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java index 112b4b7b317..5f794c75566 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java @@ -511,7 +511,9 @@ private void checkForBulkUpdateFailure(BulkResponse response) { if (!failedDocuments.isEmpty()) { String message = "Bulk indexing has failures. 
Use ElasticsearchException.getFailedDocuments() for details [" + failedDocuments.values() + "]"; throw new ElasticsearchException(message, - ErrorResponse.of(builder -> builder.error(ErrorCause.of(errorCauseBuilder -> errorCauseBuilder.reason(message))))); + ErrorResponse.of(builder -> builder + .error(ErrorCause.of(errorCauseBuilder -> errorCauseBuilder.reason(message))) + .status(response.items().getFirst().status()))); } } diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index b8676f06b1a..01b93b71024 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -13,6 +13,7 @@ import jakarta.annotation.PreDestroy; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.concurrent.*; @@ -111,22 +112,22 @@ public void destroy() throws InterruptedException { elasticTaskExecutor.shutdown(); } - private ElasticTask indexTaskWorker(ElasticTask task) { log.debug("Indexing task [{}] in thread [{}]", task.getTaskId(), Thread.currentThread().getName()); com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask elasticTask = null; try { - elasticTask = repository.findByStringId(task.getId()); - if (elasticTask == null) { + Optional elasticTaskOptional = repository.findById(task.getId()); + if (elasticTaskOptional.isEmpty()) { elasticTask = repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); } else { + elasticTask = elasticTaskOptional.get(); elasticTask.update(task); elasticTask = repository.save(elasticTask); } log.debug("[{}]: Task \"{}\" [{}] indexed", task.getCaseId(), task.getTitle(), 
task.getId()); } catch (InvalidDataAccessApiUsageException e) { log.debug("[{}]: Task \"{}\" has duplicates, will be reindexed", task.getCaseId(), task.getTitle()); - repository.deleteAllByStringId(task.getId()); + repository.deleteAllById(task.getId()); repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); log.debug("[{}]: Task \"{}\" indexed", task.getCaseId(), task.getTitle()); } catch (RuntimeException e) { @@ -138,7 +139,7 @@ private ElasticTask indexTaskWorker(ElasticTask task) { private ElasticTask removeTaskWorker(ElasticTask task) { log.debug("Remove task [{}] in thread [{}]", task.getTaskId(), Thread.currentThread().getName()); try { - log.debug("[{}]: Task \"{}\" [{}] removed", task.getCaseId(), task.getTitle(), task.getStringId()); + log.debug("[{}]: Task \"{}\" [{}] removed", task.getCaseId(), task.getTitle(), task.getId()); return repository.deleteAllByTaskId(task.getTaskId()); } catch (RuntimeException e) { log.error("Elastic executor was killed before finish: {}", e.getMessage()); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java index c25a5080f6f..8c7307ef9e1 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ReindexingTask.java @@ -4,18 +4,14 @@ import com.netgrif.application.engine.elastic.domain.ElasticCaseRepository; import com.netgrif.application.engine.elastic.service.interfaces.*; import com.netgrif.application.engine.objects.workflow.domain.Case; -import com.netgrif.application.engine.adapter.spring.workflow.domain.QCase; import com.netgrif.application.engine.objects.workflow.domain.Task; -import com.netgrif.application.engine.workflow.domain.repositories.CaseRepository; import 
com.netgrif.application.engine.workflow.domain.repositories.TaskRepository; import com.netgrif.application.engine.workflow.service.interfaces.IWorkflowService; import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.BooleanExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; @@ -23,7 +19,6 @@ import org.springframework.stereotype.Component; import java.sql.Timestamp; -import java.time.Duration; import java.time.LocalDateTime; import java.util.List; @@ -93,7 +88,7 @@ private void reindexPage(Predicate predicate, int page, long numOfPages, boolean Page cases = this.workflowService.search(predicate, PageRequest.of(page, pageSize)); for (Case aCase : cases) { - if (forced || elasticCaseRepository.countByStringIdAndLastModified(aCase.getStringId(), Timestamp.valueOf(aCase.getLastModified()).getTime()) == 0) { + if (forced || elasticCaseRepository.countByIdAndLastModified(aCase.getStringId(), Timestamp.valueOf(aCase.getLastModified()).getTime()) == 0) { elasticCaseService.indexNow(this.caseMappingService.transform(aCase)); List tasks = taskRepository.findAllByCaseId(aCase.getStringId()); for (Task task : tasks) { From 82ca36ad1255a5412543ff96b219b3ba80c0897a Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 14:42:18 +0200 Subject: [PATCH 04/17] Refactor Elasticsearch configuration and object mapping setup Moved the ObjectMapper configuration from ElasticsearchConfiguration to ElasticIndexService. This improves encapsulation by localizing the mapping logic within the service, reducing unnecessary bean exposure. 
--- .../ElasticsearchConfiguration.java | 19 ----------- .../elastic/service/ElasticIndexService.java | 34 ++++++++++++++++++- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java index bf5603226c2..ef47e1d95d5 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java @@ -1,11 +1,6 @@ package com.netgrif.application.engine.configuration; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; -import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonDeserializer; -import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonSerializer; import com.netgrif.application.engine.workflow.service.CaseEventHandler; import org.jetbrains.annotations.NotNull; import org.springframework.context.annotation.*; @@ -13,7 +8,6 @@ import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories; import org.springframework.data.elasticsearch.support.HttpHeaders; -import java.time.LocalDateTime; import java.util.List; @Configuration @@ -80,19 +74,6 @@ public ClientConfiguration clientConfiguration() { return clientBuilder.build(); } - @Bean(name = "elasticCaseObjectMapper") - public ObjectMapper configureMapper() { - ObjectMapper mapper = new ObjectMapper(); - - JavaTimeModule javaTimeModule = new JavaTimeModule(); - javaTimeModule.addSerializer(LocalDateTime.class, new 
LocalDateTimeJsonSerializer()); - javaTimeModule.addDeserializer(LocalDateTime.class, new LocalDateTimeJsonDeserializer()); - - mapper.registerModule(javaTimeModule); - mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); - return mapper; - } - private boolean hasCredentials() { return elasticsearchProperties.getUsername() != null && !elasticsearchProperties.getUsername().isBlank() && elasticsearchProperties.getPassword() != null && !elasticsearchProperties.getPassword().isBlank(); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java index 5f794c75566..6e03c8298cc 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java @@ -10,12 +10,16 @@ import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; import co.elastic.clients.elasticsearch.indices.*; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; import com.netgrif.application.engine.elastic.service.interfaces.IElasticCaseMappingService; import com.netgrif.application.engine.elastic.service.interfaces.IElasticIndexService; import com.netgrif.application.engine.elastic.service.interfaces.IElasticTaskMappingService; import com.netgrif.application.engine.objects.elastic.domain.ElasticCase; import com.netgrif.application.engine.objects.elastic.domain.ElasticTask; +import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonDeserializer; +import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonSerializer; 
import com.netgrif.application.engine.objects.workflow.domain.Case; import com.netgrif.application.engine.objects.workflow.domain.Task; import com.netgrif.application.engine.petrinet.service.interfaces.IPetriNetService; @@ -44,7 +48,6 @@ @Slf4j @Service -@RequiredArgsConstructor public class ElasticIndexService implements IElasticIndexService { private static final String PLACEHOLDERS = "petriNetIndex, caseIndex, taskIndex"; @@ -56,6 +59,27 @@ public class ElasticIndexService implements IElasticIndexService { private final IElasticTaskMappingService taskMappingService; private final IPetriNetService petriNetService; private final DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties; + private final ObjectMapper objectMapper; + + public ElasticIndexService(ApplicationContext context, + ElasticsearchTemplate elasticsearchTemplate, + ElasticsearchClient elasticsearchClient, + MongoTemplate mongoTemplate, + IElasticCaseMappingService caseMappingService, + IElasticTaskMappingService taskMappingService, + IPetriNetService petriNetService, + DataConfigurationProperties.ElasticsearchProperties elasticsearchProperties) { + this.context = context; + this.elasticsearchTemplate = elasticsearchTemplate; + this.elasticsearchClient = elasticsearchClient; + this.mongoTemplate = mongoTemplate; + this.caseMappingService = caseMappingService; + this.taskMappingService = taskMappingService; + this.petriNetService = petriNetService; + this.elasticsearchProperties = elasticsearchProperties; + this.objectMapper = new ObjectMapper(); + configureObjectMapper(); + } @Override public boolean indexExists(String indexName) { @@ -590,4 +614,12 @@ private String getIndexName(Class clazz, String... 
placeholders) { return indexName; } + private void configureObjectMapper() { + JavaTimeModule javaTimeModule = new JavaTimeModule(); + javaTimeModule.addSerializer(LocalDateTime.class, new LocalDateTimeJsonSerializer()); + javaTimeModule.addDeserializer(LocalDateTime.class, new LocalDateTimeJsonDeserializer()); + objectMapper.registerModule(javaTimeModule); + objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); + } + } From 30ca8138d3525b5b0a6bd9cea5edd2eea1c0c2fb Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 15:22:55 +0200 Subject: [PATCH 05/17] Add custom JsonpMapper for Elasticsearch configuration Introduced a custom JsonpMapper to handle Java 8 `LocalDateTime` serialization and deserialization using Jackson. This ensures proper JSON handling of datetime objects in Elasticsearch operations. --- .../ElasticsearchConfiguration.java | 23 ++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java index ef47e1d95d5..2c02ebb969e 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/ElasticsearchConfiguration.java @@ -1,6 +1,13 @@ package com.netgrif.application.engine.configuration; +import co.elastic.clients.json.JsonpMapper; +import co.elastic.clients.json.jackson.JacksonJsonpMapper; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.netgrif.application.engine.configuration.properties.DataConfigurationProperties; +import 
com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonDeserializer; +import com.netgrif.application.engine.objects.elastic.serializer.LocalDateTimeJsonSerializer; import com.netgrif.application.engine.workflow.service.CaseEventHandler; import org.jetbrains.annotations.NotNull; import org.springframework.context.annotation.*; @@ -8,6 +15,7 @@ import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories; import org.springframework.data.elasticsearch.support.HttpHeaders; +import java.time.LocalDateTime; import java.util.List; @Configuration @@ -74,9 +82,22 @@ public ClientConfiguration clientConfiguration() { return clientBuilder.build(); } + @NotNull + @Override + public JsonpMapper jsonpMapper() { + ObjectMapper mapper = new ObjectMapper(); + JavaTimeModule javaTimeModule = new JavaTimeModule(); + + mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); + javaTimeModule.addSerializer(LocalDateTime.class, new LocalDateTimeJsonSerializer()); + javaTimeModule.addDeserializer(LocalDateTime.class, new LocalDateTimeJsonDeserializer()); + mapper.registerModule(javaTimeModule); + return new JacksonJsonpMapper(mapper); + } + private boolean hasCredentials() { return elasticsearchProperties.getUsername() != null && !elasticsearchProperties.getUsername().isBlank() && - elasticsearchProperties.getPassword() != null && !elasticsearchProperties.getPassword().isBlank(); + elasticsearchProperties.getPassword() != null && !elasticsearchProperties.getPassword().isBlank(); } private boolean hasToken() { From 27d0da866deda9a7c6d3a172e38378cc52d353bb Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 21:45:54 +0200 Subject: [PATCH 06/17] Refactor IDs by replacing `stringId` with `id` Replaced usages of `stringId` with `id` across the codebase for consistency. Removed the `stringId` field and updated related methods, constructors, and tests to use `id`. 
This change improves clarity and aligns the ID handling logic with standard practices. --- .../domain/ElasticPetriNetRepository.java | 12 +--------- .../service/ElasticPetriNetService.java | 23 ++++++++++--------- .../WorkflowAuthorizationServiceTest.groovy | 6 ++--- .../engine/elastic/ElasticTaskTest.groovy | 6 ++--- .../engine/insurance/mvc/InsuranceTest.groovy | 2 +- .../domain/roles/ProcessRoleTest.groovy | 2 +- .../service/PetriNetServiceTest.groovy | 3 +-- .../objects/elastic/domain/ElasticCase.java | 6 +++-- .../elastic/domain/ElasticPetriNet.java | 10 ++++---- .../objects/elastic/domain/ElasticTask.java | 5 +++- .../spring/elastic/domain/ElasticCase.java | 6 +++-- .../elastic/domain/ElasticPetriNet.java | 11 ++++----- .../spring/elastic/domain/ElasticTask.java | 8 ++++--- 13 files changed, 48 insertions(+), 52 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticPetriNetRepository.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticPetriNetRepository.java index 7f86ffa9153..54c0b7ad48e 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticPetriNetRepository.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/domain/ElasticPetriNetRepository.java @@ -4,8 +4,6 @@ import org.springframework.stereotype.Repository; import com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticPetriNet; -import java.util.List; - /** * Repository interface for managing {@link ElasticPetriNet} entities in Elasticsearch. * Extends {@link ElasticsearchRepository} to provide CRUD operations and additional query methods. @@ -13,18 +11,10 @@ @Repository public interface ElasticPetriNetRepository extends ElasticsearchRepository { - /** - * Finds an {@link ElasticPetriNet} entity by its string ID. 
- * - * @param stringId the string ID of the {@link ElasticPetriNet} to find - * @return the {@link ElasticPetriNet} entity with the given string ID, or {@code null} if none found - */ - ElasticPetriNet findByStringId(String stringId); - /** * Deletes all {@link ElasticPetriNet} entities with the given string ID. * * @param id the string ID of the {@link ElasticPetriNet} entities to delete */ - void deleteAllByStringId(String id); + void deleteAllById(String id); } \ No newline at end of file diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticPetriNetService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticPetriNetService.java index 4b81efac18e..c64d947fe6a 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticPetriNetService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticPetriNetService.java @@ -63,21 +63,22 @@ public void setPetriNetService(IPetriNetService petriNetService) { @Override public void index(ElasticPetriNet net) { - executors.execute(net.getStringId(), () -> { + executors.execute(net.getId(), () -> { try { - com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticPetriNet elasticPetriNet = repository.findByStringId(net.getStringId()); - if (elasticPetriNet == null) { + Optional elasticPetriNetOptional = repository.findById(net.getId()); + if (elasticPetriNetOptional.isEmpty()) { repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticPetriNet) net); } else { - elasticPetriNet.update(net); - repository.save(elasticPetriNet); + com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticPetriNet elasticNet = elasticPetriNetOptional.get(); + elasticNet.update(net); + repository.save(elasticNet); } - log.debug("[" + net.getStringId() + "]: PetriNet \"" + net.getTitle() + "\" indexed"); + log.debug("[" + net.getId() + 
"]: PetriNet \"" + net.getTitle() + "\" indexed"); } catch (InvalidDataAccessApiUsageException ignored) { - log.debug("[" + net.getStringId() + "]: PetriNet \"" + net.getTitle() + "\" has duplicates, will be reindexed"); - repository.deleteAllByStringId(net.getStringId()); + log.debug("[" + net.getId() + "]: PetriNet \"" + net.getTitle() + "\" has duplicates, will be reindexed"); + repository.deleteAllById(net.getId()); repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticPetriNet) net); - log.debug("[" + net.getStringId() + "]: PetriNet \"" + net.getTitle() + "\" indexed"); + log.debug("[" + net.getId() + "]: PetriNet \"" + net.getTitle() + "\" indexed"); } }); } @@ -90,7 +91,7 @@ public void indexNow(ElasticPetriNet net) { @Override public void remove(String id) { executors.execute(id, () -> { - repository.deleteAllByStringId(id); + repository.deleteAllById(id); log.info("[" + id + "]: PetriNet \"" + id + "\" deleted"); }); } @@ -120,7 +121,7 @@ public Page search(PetriNetSearch requests, LoggedUser user, if (query != null) { SearchHits hits = template.search(query, ElasticPetriNet.class, IndexCoordinates.of(elasticsearchConfiguration.elasticPetriNetIndex())); Page indexedNets = (Page) SearchHitSupport.unwrapSearchHits(SearchHitSupport.searchPageFor(hits, query.getPageable())); - netPage = petriNetService.findAllById(indexedNets.get().map(ElasticPetriNet::getStringId).collect(Collectors.toList())); + netPage = petriNetService.findAllById(indexedNets.get().map(ElasticPetriNet::getId).collect(Collectors.toList())); total = indexedNets.getTotalElements(); log.debug("Found [{}] total elements of page [{}]", netPage.size(), pageable.getPageNumber()); } else { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy index 9dece662e17..ed8b85717bd 100644 --- 
a/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy @@ -149,7 +149,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() def response = parseResult(result) - String userCaseId1 = response.outcome.aCase.stringId + String userCaseId1 = response.outcome.aCase.id result = mvc.perform(post(CREATE_CASE_URL) .content(body) @@ -158,7 +158,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() response = parseResult(result) - String userCaseId2 = response.outcome.aCase.stringId + String userCaseId2 = response.outcome.aCase.id result = mvc.perform(post(CREATE_CASE_URL) .content(body) @@ -167,7 +167,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() response = parseResult(result) - String otherUserCaseId = response.outcome.acase.stringId + String otherUserCaseId = response.outcome.acase.id /* TODO: momentalne vracia 200 OK, ma User vediet zmazat case ktory vytvoril Admin?
mvc.perform(delete(DELETE_CASE_URL + otherUserCaseId) diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy index 71695bf6e2d..b08180d7448 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy @@ -191,7 +191,7 @@ class ElasticTaskTest { ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() task.setTaskId("TestTask") task.setTitle(new I18nString("START")) - task.setStringId("TestTask") + task.setId("TestTask") elasticTaskService.index(task) ExecutorService executorService = Executors.newFixedThreadPool(3) @@ -204,7 +204,7 @@ class ElasticTaskTest { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() taskParallel.setTaskId("TestTask") task.setTitle(new I18nString("START" + index)) - taskParallel.setStringId("TestTask") + taskParallel.setId("TestTask") Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() assert result != null @@ -246,7 +246,7 @@ class ElasticTaskTest { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() taskParallel.setTaskId("TestTask"+ index) task.setTitle(new I18nString("START")) - taskParallel.setStringId("TestTask"+index) + taskParallel.setId("TestTask"+index) Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() assert result != null diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy index 
ae4ed3c1c5a..77f28bddfd6 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy @@ -264,7 +264,7 @@ class InsuranceTest { // .andExpect(jsonPath('$.outcome.aCase.petriNetId', CoreMatchers.is(netId))) .andReturn() def response = parseResult(result) - caseId = response.outcome.aCase.stringId + caseId = response.outcome.aCase.id } def searchTasks(String title, int expected) { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy index 9d7da5789a5..dcaa41354e7 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy @@ -149,7 +149,7 @@ class ProcessRoleTest { .andExpect(jsonPath('$.outcome.acase.petriNetId', CoreMatchers.is(netId))) .andReturn() def response = parseResult(result) - caseId = response.outcome.acase.stringId + caseId = response.outcome.acase.id } def searchTasks(String title, int expected) { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy index 3ec07b9b4b4..06f83a1682f 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy @@ -111,7 +111,6 @@ class PetriNetServiceTest { assert petriNetRepository.count() == processCount + 1 PetriNet testNet = 
testNetOptional.getNet() Thread.sleep(5000) - ElasticPetriNet elasticTestNet = elasticPetriNetRepository.findByStringId(testNet.stringId) assert petriNetRepository.findById(testNet.stringId).get().uriNodeId == null importHelper.createCase("Case 1", testNet) @@ -132,7 +131,7 @@ class PetriNetServiceTest { petriNetService.deletePetriNet(testNet.stringId, superCreator.getLoggedSuper()) assert petriNetRepository.count() == processCount Thread.sleep(5000) - assert elasticPetriNetRepository.findByStringId(testNet.stringId) == null + assert elasticPetriNetRepository.findById(testNet.stringId).isEmpty() assert caseRepository.findAllByProcessIdentifier(testNetOptional.getNet().getImportId()).size() == 0 assert taskRepository.count() == taskCount assert processRoleRepository.count() == processRoleCount diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java index 682b0d3dd11..4f9e95453a6 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java @@ -5,7 +5,7 @@ import com.netgrif.application.engine.objects.workflow.domain.TaskPair; import lombok.AllArgsConstructor; import lombok.Data; -import lombok.NoArgsConstructor; +import org.bson.types.ObjectId; import java.io.Serial; import java.io.Serializable; @@ -17,7 +17,6 @@ @Data -@NoArgsConstructor @AllArgsConstructor public abstract class ElasticCase implements Serializable { @@ -78,6 +77,9 @@ public abstract class ElasticCase implements Serializable { private Map tags; + public ElasticCase() { + this.id = new ObjectId().toString(); + } public ElasticCase(Case useCase) { id = useCase.getStringId(); diff --git 
a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java index e4be87c6c0e..ec73eefb32f 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java @@ -12,11 +12,8 @@ import lombok.Data; import lombok.NoArgsConstructor; import java.time.LocalDateTime; -import java.util.HashSet; -import java.util.Set; @Data -@NoArgsConstructor @AllArgsConstructor public abstract class ElasticPetriNet { @@ -28,7 +25,6 @@ public abstract class ElasticPetriNet { private String uriNodeId; - private String stringId; private I18nField title; @@ -38,11 +34,15 @@ public abstract class ElasticPetriNet { @JsonDeserialize(using = LocalDateTimeDeserializer.class) private LocalDateTime creationDate; + public ElasticPetriNet() { + this.id = new org.bson.types.ObjectId().toString(); + } + public ElasticPetriNet(PetriNet net) { + this.id = net.getStringId(); this.identifier = net.getIdentifier(); this.version = net.getVersion(); this.uriNodeId = net.getUriNodeId(); - this.stringId = net.getStringId(); this.title = this.transformToField(net.getTitle()); this.initials = net.getInitials(); this.creationDate = net.getCreationDate(); diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java index c1fec55b3da..83b3afc7cc3 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java @@ -15,7 +15,6 @@ import java.util.Set; @Data -@NoArgsConstructor
@AllArgsConstructor public abstract class ElasticTask { @@ -77,6 +76,10 @@ public abstract class ElasticTask { private Map tags; + public ElasticTask() { + this.id = new Object().toString(); + } + public ElasticTask(Task task) { this.id = task.getStringId(); this.processId = task.getProcessId(); diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java index 7ee9749d94d..b061a524043 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java @@ -1,7 +1,6 @@ package com.netgrif.application.engine.adapter.spring.elastic.domain; import com.netgrif.application.engine.objects.workflow.domain.Case; -import lombok.NoArgsConstructor; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Version; import org.springframework.data.elasticsearch.annotations.DateFormat; @@ -16,10 +15,13 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Flattened; import static org.springframework.data.elasticsearch.annotations.FieldType.Keyword; -@NoArgsConstructor @Document(indexName = "#{@elasticCaseIndex}") public class ElasticCase extends com.netgrif.application.engine.objects.elastic.domain.ElasticCase { + public ElasticCase() { + super(); + } + public ElasticCase(Case useCase) { super(useCase); } diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java index f05f726a5ab..8834373d2a3 100644 --- 
a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java @@ -16,10 +16,13 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Keyword; -@NoArgsConstructor @Document(indexName = "#{@elasticPetriNetIndex}") public class ElasticPetriNet extends com.netgrif.application.engine.objects.elastic.domain.ElasticPetriNet { + public ElasticPetriNet() { + super(); + } + public ElasticPetriNet(PetriNet net) { super(net); } @@ -46,12 +49,6 @@ public String getUriNodeId() { return super.getUriNodeId(); } - @Field(type = Keyword) - @Override - public String getStringId() { - return super.getStringId(); - } - @Field(type = Keyword) @Override public String getInitials() { diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java index 956e0469909..be622f8e407 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java @@ -1,8 +1,6 @@ package com.netgrif.application.engine.adapter.spring.elastic.domain; -import com.netgrif.application.engine.objects.petrinet.domain.I18nString; import com.netgrif.application.engine.objects.workflow.domain.Task; -import lombok.NoArgsConstructor; import org.springframework.data.annotation.Id; import org.springframework.data.elasticsearch.annotations.DateFormat; import org.springframework.data.elasticsearch.annotations.Document; @@ -16,10 +14,14 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Flattened; import static 
org.springframework.data.elasticsearch.annotations.FieldType.Keyword; -@NoArgsConstructor + @Document(indexName = "#{@elasticTaskIndex}") public class ElasticTask extends com.netgrif.application.engine.objects.elastic.domain.ElasticTask { + public ElasticTask() { + super(); + } + public ElasticTask(Task task) { super(task); } From 945b4cb70fc055f77c2c2cba6b6c01825174f9ba Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 21:51:27 +0200 Subject: [PATCH 07/17] Update validation import to use Jakarta package Replaced the `javax.validation.Valid` import with `jakarta.validation.Valid` to align with the newer Jakarta API standards. This ensures compatibility with updated dependencies and maintains consistency across the project. --- .../configuration/properties/DataConfigurationProperties.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java index fff7f31cd9a..d30a25f4df0 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java @@ -15,7 +15,7 @@ import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import javax.validation.Valid; +import jakarta.validation.Valid; import java.time.Duration; import java.util.*; From cc9ebea0c086d81f612a960fc0359214c4cafd97 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 21:53:30 +0200 Subject: [PATCH 08/17] Add @ToString.Exclude to sensitive properties Added @ToString.Exclude annotation to `username`, `password`, and `token` fields in `DataConfigurationProperties` to prevent sensitive information 
from being exposed in the `toString()` method. This ensures better protection of sensitive credentials in log output or debugging scenarios. --- .../configuration/properties/DataConfigurationProperties.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java index d30a25f4df0..0b0d4e2d6d1 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/configuration/properties/DataConfigurationProperties.java @@ -5,6 +5,7 @@ import lombok.Data; import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; +import lombok.ToString; import org.springframework.boot.autoconfigure.data.rest.RepositoryRestProperties; import org.springframework.boot.autoconfigure.session.RedisSessionProperties; import org.springframework.boot.context.properties.ConfigurationProperties; @@ -177,16 +178,19 @@ public static class ElasticsearchProperties { /** * The username used for authenticating with the Elasticsearch server. */ + @ToString.Exclude private String username = null; /** * The password used for authenticating with the Elasticsearch server. */ + @ToString.Exclude private String password = null; /** * The authentication token for the Elasticsearch server, when using token-based authentication. */ + @ToString.Exclude private String token = null; /** From d1f209c276c191649821ec32ded0ec28a5f15898 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 22:00:38 +0200 Subject: [PATCH 09/17] Refactor reindexing to use try-with-resources and Stream API. Replaced manual iterators with try-with-resources for streamlined resource management using Stream API. 
Adjusted batch processing logic to improve safety and clarity. This enhances maintainability and reduces resource leakage risks. --- .../elastic/service/ElasticIndexService.java | 70 ++++++++++--------- 1 file changed, 37 insertions(+), 33 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java index 6e03c8298cc..05c4fbc53ef 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticIndexService.java @@ -23,7 +23,6 @@ import com.netgrif.application.engine.objects.workflow.domain.Case; import com.netgrif.application.engine.objects.workflow.domain.Task; import com.netgrif.application.engine.petrinet.service.interfaces.IPetriNetService; -import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.context.ApplicationContext; import org.springframework.core.io.Resource; @@ -45,6 +44,7 @@ import java.lang.reflect.Field; import java.time.LocalDateTime; import java.util.*; +import java.util.stream.Stream; @Slf4j @Service @@ -359,30 +359,32 @@ public void bulkIndex(boolean indexAll, LocalDateTime after, Integer caseBatchSi * @param taskBatchSize batch size for tasks */ private void reindexQueried(org.springframework.data.mongodb.core.query.Query query, long count, int caseBatchSize, int taskBatchSize) { - long numOfPages = ((count / caseBatchSize) + 1); + long numOfPages = Math.max(1, Math.ceilDiv(count, (long) caseBatchSize)); log.info("Reindexing {} pages", numOfPages); query.cursorBatchSize(caseBatchSize); long page = 1, currentBatchSize = 0; List caseOperations = new ArrayList<>(); List caseIds = new ArrayList<>(); - Iterator cursor = mongoTemplate.stream(query, Case.class).iterator(); - - while 
(cursor.hasNext()) { - Case aCase = cursor.next(); - prepareCase(aCase); - ElasticCase doc = caseMappingService.transform(aCase); - prepareCaseBulkOperation(doc, caseOperations); - caseIds.add(aCase.getStringId()); - - if (++currentBatchSize == caseBatchSize || !cursor.hasNext()) { - log.info("Reindexing case page {} / {}", page, numOfPages); - executeAndValidate(caseOperations); - bulkIndexTasks(caseIds, taskBatchSize); - caseOperations.clear(); - caseIds.clear(); - currentBatchSize = 0; - page++; + + try (Stream cursorStream = mongoTemplate.stream(query, Case.class)) { + Iterator cursor = cursorStream.iterator(); + while (cursor.hasNext()) { + Case aCase = cursor.next(); + prepareCase(aCase); + ElasticCase doc = caseMappingService.transform(aCase); + prepareCaseBulkOperation(doc, caseOperations); + caseIds.add(aCase.getStringId()); + + if (++currentBatchSize == caseBatchSize || !cursor.hasNext()) { + log.info("Reindexing case page {} / {}", page, numOfPages); + executeAndValidate(caseOperations); + bulkIndexTasks(caseIds, taskBatchSize); + caseOperations.clear(); + caseIds.clear(); + currentBatchSize = 0; + page++; + } } } @@ -400,23 +402,25 @@ private void bulkIndexTasks(List caseIds, int taskBatchSize) { } org.springframework.data.mongodb.core.query.Query query = org.springframework.data.mongodb.core.query.Query.query(Criteria.where("caseId").in(caseIds)).cursorBatchSize(taskBatchSize); long totalSize = mongoTemplate.count(query, Task.class); - long numOfPages = ((totalSize / taskBatchSize) + 1); + long numOfPages = Math.max(1, Math.ceilDiv(totalSize, (long) taskBatchSize)); long page = 1, currentBatchSize = 0; List taskOperations = new ArrayList<>(); - Iterator cursor = mongoTemplate.stream(query, Task.class).iterator(); - - while (cursor.hasNext()) { - Task task = cursor.next(); - ElasticTask elasticTask = taskMappingService.transform(task); - prepareTaskBulkOperation(elasticTask, taskOperations); - - if (++currentBatchSize == taskBatchSize || 
!cursor.hasNext()) { - log.info("Reindexing task page {} / {}", page, numOfPages); - executeAndValidate(taskOperations); - taskOperations.clear(); - currentBatchSize = 0; - page++; + + try (Stream cursorStream = mongoTemplate.stream(query, Task.class)) { + Iterator cursor = cursorStream.iterator(); + while (cursor.hasNext()) { + Task task = cursor.next(); + ElasticTask elasticTask = taskMappingService.transform(task); + prepareTaskBulkOperation(elasticTask, taskOperations); + + if (++currentBatchSize == taskBatchSize || !cursor.hasNext()) { + log.info("Reindexing task page {} / {}", page, numOfPages); + executeAndValidate(taskOperations); + taskOperations.clear(); + currentBatchSize = 0; + page++; + } } } } From 03453446f95580b36e0c9fffacd3d12108eb19ce Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 22:06:01 +0200 Subject: [PATCH 10/17] Add support for reindexing from a specific timestamp Introduced a new `lastRun` parameter in `IndexParams` to specify a starting point based on the `lastModifiedDate` of cases. Updated `ElasticController` to pass this parameter to the `bulkIndex` method, enabling more targeted reindexing operations. 
--- .../application/engine/elastic/web/ElasticController.java | 2 +- .../engine/elastic/web/requestbodies/IndexParams.java | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java index 809c0e3efea..b959465a8c9 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/ElasticController.java @@ -125,7 +125,7 @@ public MessageResource reindex(@RequestBody Map searchBody, Auth @PostMapping(value = "/reindex/bulk", produces = MediaType.APPLICATION_JSON_VALUE) public MessageResource bulkReindex(IndexParams indexParams) { try { - indexService.bulkIndex(indexParams.isIndexAll(), null, indexParams.getCaseBatchSize(), indexParams.getTaskBatchSize()); + indexService.bulkIndex(indexParams.isIndexAll(), indexParams.getLastRun(), indexParams.getCaseBatchSize(), indexParams.getTaskBatchSize()); return MessageResource.successMessage("Success"); } catch (Exception e) { log.error("Could not index: ", e); diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java index 17710bbe22f..9b5930e5738 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/web/requestbodies/IndexParams.java @@ -2,6 +2,8 @@ import lombok.Data; +import java.time.LocalDateTime; + /** * Represents the parameters to configure the indexing operation. 
* This class allows customization of batch sizes for cases and tasks, @@ -24,4 +26,10 @@ public class IndexParams { * Specifies the batch size for tasks during indexing. Default is {@code 20000}. */ private Integer taskBatchSize = 20000; + + + /** + * Specifies the date, from which to consider lastModifiedDate property of cases. + */ + private LocalDateTime lastRun = null; } From 90c11122433d9d3d32b760d7c34a8e2365f84413 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Tue, 19 Aug 2025 22:13:46 +0200 Subject: [PATCH 11/17] Fix incorrect method parameter in task removal Updated the repository call to use the correct parameter `task.getId()` instead of `task.getTaskId()`. This ensures that tasks are properly identified and removed, preventing potential errors during task deletion. --- .../engine/elastic/service/ElasticTaskQueueManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index 01b93b71024..4f4b12f364a 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -140,7 +140,7 @@ private ElasticTask removeTaskWorker(ElasticTask task) { log.debug("Remove task [{}] in thread [{}]", task.getTaskId(), Thread.currentThread().getName()); try { log.debug("[{}]: Task \"{}\" [{}] removed", task.getCaseId(), task.getTitle(), task.getId()); - return repository.deleteAllByTaskId(task.getTaskId()); + return repository.deleteAllByTaskId(task.getId()); } catch (RuntimeException e) { log.error("Elastic executor was killed before finish: {}", e.getMessage()); } From 7ee5e8954ba862e59f0765d3d9786f078f57084a Mon Sep 17 00:00:00 2001 From: renczesstefan Date: 
Wed, 20 Aug 2025 07:43:00 +0200 Subject: [PATCH 12/17] Fix inconsistent task ID usage in task removal process Previously, the task removal process logged and deleted tasks using inconsistent ID references (`getId` vs. `getTaskId`). This change ensures consistent usage of `getTaskId` for both logging and deletion, preventing potential errors and improving clarity. --- .../engine/elastic/service/ElasticTaskQueueManager.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index 4f4b12f364a..9d8d828781b 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -139,8 +139,8 @@ private ElasticTask indexTaskWorker(ElasticTask task) { private ElasticTask removeTaskWorker(ElasticTask task) { log.debug("Remove task [{}] in thread [{}]", task.getTaskId(), Thread.currentThread().getName()); try { - log.debug("[{}]: Task \"{}\" [{}] removed", task.getCaseId(), task.getTitle(), task.getId()); - return repository.deleteAllByTaskId(task.getId()); + log.debug("[{}]: Task \"{}\" [{}] removed", task.getCaseId(), task.getTitle(), task.getTaskId()); + return repository.deleteAllByTaskId(task.getTaskId()); } catch (RuntimeException e) { log.error("Elastic executor was killed before finish: {}", e.getMessage()); } From fbaafc3ee7ac43a13ca90c82592b8cc0fd23e164 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Wed, 20 Aug 2025 07:44:15 +0200 Subject: [PATCH 13/17] Refactor case ID field access in test assertions Replaced the deprecated `getId` method with direct `.id` field access for better readability and consistency. 
This update ensures compatibility with the current data structure and cleans up test code syntax. --- .../engine/auth/WorkflowAuthorizationServiceTest.groovy | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy index ed8b85717bd..6191dd7946e 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/auth/WorkflowAuthorizationServiceTest.groovy @@ -149,7 +149,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() def response = parseResult(result) - String userCaseId1 = response.outcome.aCase.getId + String userCaseId1 = response.outcome.aCase.id result = mvc.perform(post(CREATE_CASE_URL) .content(body) @@ -158,7 +158,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() response = parseResult(result) - String userCaseId2 = response.outcome.aCase.getId + String userCaseId2 = response.outcome.aCase.id result = mvc.perform(post(CREATE_CASE_URL) .content(body) @@ -167,7 +167,7 @@ class WorkflowAuthorizationServiceTest { .andExpect(status().isOk()) .andReturn() response = parseResult(result) - String otherUserCaseId = response.outcome.acase.getId + String otherUserCaseId = response.outcome.aCase.id /* TODO: momentalne vracia 200 OK, ma User vediet zmazat case ktory vytvoril Admin? 
mvc.perform(delete(DELETE_CASE_URL + otherUserCaseId) From 4850288b35782d918152294088758a8e59faf2fe Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Wed, 20 Aug 2025 08:14:03 +0200 Subject: [PATCH 14/17] Remove explicit constructors and add @NoArgsConstructor annotations Removed explicit no-argument constructors across Elastic* domain objects and replaced them with the @NoArgsConstructor annotation for cleaner and more consistent code. Additionally, adjusted test cases to properly initialize task IDs where required and updated instance checks to reflect repository changes. --- .../engine/elastic/ElasticTaskTest.groovy | 18 +++++++++++------- .../engine/insurance/mvc/InsuranceTest.groovy | 2 +- .../domain/roles/ProcessRoleTest.groovy | 2 +- .../service/PetriNetServiceTest.groovy | 2 +- .../objects/elastic/domain/ElasticCase.java | 7 ++----- .../elastic/domain/ElasticPetriNet.java | 6 +----- .../objects/elastic/domain/ElasticTask.java | 5 +---- .../spring/elastic/domain/ElasticCase.java | 6 ++---- .../spring/elastic/domain/ElasticPetriNet.java | 5 +---- .../spring/elastic/domain/ElasticTask.java | 7 ++----- 10 files changed, 23 insertions(+), 37 deletions(-) diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy index b08180d7448..2e1151b1c87 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy @@ -93,6 +93,7 @@ class ElasticTaskTest { @Test void testIndexTask() { ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() + task.setId("TestTask") task.setTaskId("TestTask") task.setTitle(new I18nString("Test")) task.setProcessId("Process") @@ -126,6 +127,7 @@ class ElasticTaskTest { @Test void testRemoveTaskByProcess() 
throws Exception { ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() + task.setId("TestTask") task.setTaskId("TestTask") task.setTitle(new I18nString("Test")) task.setProcessId("Process") @@ -148,6 +150,7 @@ class ElasticTaskTest { void reindexTaskAllTest() throws InterruptedException, ExecutionException { int pocetOpakovani = 100 ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() + task.setId("TestTask") task.setTaskId("TestTask") task.setTitle(new I18nString("Test")) task.setProcessId("TestProcess") @@ -160,7 +163,8 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - taskParallel.setTaskId("TestTask") + task.setId("TestTask" + index) + task.setTaskId("TestTask" + index) task.setTitle(new I18nString("START" + index)) taskParallel.setProcessId("TestProcess") Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) @@ -189,9 +193,9 @@ class ElasticTaskTest { void reindexTaskTest() throws InterruptedException, ExecutionException { int pocetOpakovani = 100 ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() + task.setId("TestTask") task.setTaskId("TestTask") task.setTitle(new I18nString("START")) - task.setId("TestTask") elasticTaskService.index(task) ExecutorService executorService = Executors.newFixedThreadPool(3) @@ -202,9 +206,9 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - taskParallel.setTaskId("TestTask") + task.setId("TestTask" + index) + task.setTaskId("TestTask" + index) task.setTitle(new I18nString("START" + index)) - taskParallel.setId("TestTask") Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = 
resultFuture.get() assert result != null @@ -232,7 +236,7 @@ class ElasticTaskTest { void reindexTaskParallelTest() throws InterruptedException, ExecutionException { int pocetOpakovani = 1000 ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setTaskId("TestTask") + task.setId("TestTask") task.setTitle(new I18nString("START")) elasticTaskService.index(task) @@ -244,9 +248,9 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - taskParallel.setTaskId("TestTask"+ index) + task.setId("TestTask" + index) + task.setTaskId("TestTask" + index) task.setTitle(new I18nString("START")) - taskParallel.setId("TestTask"+index) Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() assert result != null diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy index 77f28bddfd6..7f52e6c0fb7 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/insurance/mvc/InsuranceTest.groovy @@ -264,7 +264,7 @@ class InsuranceTest { // .andExpect(jsonPath('$.outcome.aCase.petriNetId', CoreMatchers.is(netId))) .andReturn() def response = parseResult(result) - caseId = response.outcome.aCase.getId + caseId = response.outcome.aCase.id } def searchTasks(String title, int expected) { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy index dcaa41354e7..d2fced36c8d 100644 --- 
a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/domain/roles/ProcessRoleTest.groovy @@ -149,7 +149,7 @@ class ProcessRoleTest { .andExpect(jsonPath('$.outcome.acase.petriNetId', CoreMatchers.is(netId))) .andReturn() def response = parseResult(result) - caseId = response.outcome.acase.getId + caseId = response.outcome.aCase.id } def searchTasks(String title, int expected) { diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy index 06f83a1682f..84ce8b5ee0c 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/petrinet/service/PetriNetServiceTest.groovy @@ -131,7 +131,7 @@ class PetriNetServiceTest { petriNetService.deletePetriNet(testNet.stringId, superCreator.getLoggedSuper()) assert petriNetRepository.count() == processCount Thread.sleep(5000) - assert elasticPetriNetRepository.findById(testNet.stringId).isPresent() + assert elasticPetriNetRepository.findById(testNet.stringId).isEmpty() assert caseRepository.findAllByProcessIdentifier(testNetOptional.getNet().getImportId()).size() == 0 assert taskRepository.count() == taskCount assert processRoleRepository.count() == processRoleCount diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java index 4f9e95453a6..cd85215b31d 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java +++ 
b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticCase.java @@ -5,7 +5,7 @@ import com.netgrif.application.engine.objects.workflow.domain.TaskPair; import lombok.AllArgsConstructor; import lombok.Data; -import org.bson.types.ObjectId; +import lombok.NoArgsConstructor; import java.io.Serial; import java.io.Serializable; @@ -17,6 +17,7 @@ @Data +@NoArgsConstructor @AllArgsConstructor public abstract class ElasticCase implements Serializable { @@ -77,10 +78,6 @@ public abstract class ElasticCase implements Serializable { private Map tags; - public ElasticCase() { - this.id = new ObjectId().toString(); - } - public ElasticCase(Case useCase) { id = useCase.getStringId(); lastModified = Timestamp.valueOf(useCase.getLastModified()).getTime(); diff --git a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java index ec73eefb32f..bdec2b24c71 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticPetriNet.java @@ -14,6 +14,7 @@ import java.time.LocalDateTime; @Data +@NoArgsConstructor @AllArgsConstructor public abstract class ElasticPetriNet { @@ -25,7 +26,6 @@ public abstract class ElasticPetriNet { private String uriNodeId; - private I18nField title; private String initials; @@ -34,10 +34,6 @@ public abstract class ElasticPetriNet { @JsonDeserialize(using = LocalDateTimeDeserializer.class) private LocalDateTime creationDate; - public ElasticPetriNet() { - this.id = new Object().toString(); - } - public ElasticPetriNet(PetriNet net) { this.id = net.getStringId(); this.identifier = net.getIdentifier(); diff --git 
a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java index 83b3afc7cc3..c1fec55b3da 100644 --- a/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java +++ b/nae-object-library/src/main/java/com/netgrif/application/engine/objects/elastic/domain/ElasticTask.java @@ -15,6 +15,7 @@ import java.util.Set; @Data +@NoArgsConstructor @AllArgsConstructor public abstract class ElasticTask { @@ -76,10 +77,6 @@ public abstract class ElasticTask { private Map tags; - public ElasticTask() { - this.id = new Object().toString(); - } - public ElasticTask(Task task) { this.id = task.getStringId(); this.processId = task.getProcessId(); diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java index b061a524043..7ee9749d94d 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticCase.java @@ -1,6 +1,7 @@ package com.netgrif.application.engine.adapter.spring.elastic.domain; import com.netgrif.application.engine.objects.workflow.domain.Case; +import lombok.NoArgsConstructor; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Version; import org.springframework.data.elasticsearch.annotations.DateFormat; @@ -15,13 +16,10 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Flattened; import static org.springframework.data.elasticsearch.annotations.FieldType.Keyword; +@NoArgsConstructor @Document(indexName = "#{@elasticCaseIndex}") public class ElasticCase 
extends com.netgrif.application.engine.objects.elastic.domain.ElasticCase { - public ElasticCase() { - super(); - } - public ElasticCase(Case useCase) { super(useCase); } diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java index 8834373d2a3..5794c8a52dc 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticPetriNet.java @@ -16,13 +16,10 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Keyword; +@NoArgsConstructor @Document(indexName = "#{@elasticPetriNetIndex}") public class ElasticPetriNet extends com.netgrif.application.engine.objects.elastic.domain.ElasticPetriNet { - public ElasticPetriNet() { - super(); - } - public ElasticPetriNet(PetriNet net) { super(net); } diff --git a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java index be622f8e407..cb988a814d5 100644 --- a/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java +++ b/nae-spring-core-adapter/src/main/java/com/netgrif/application/engine/adapter/spring/elastic/domain/ElasticTask.java @@ -1,6 +1,7 @@ package com.netgrif.application.engine.adapter.spring.elastic.domain; import com.netgrif.application.engine.objects.workflow.domain.Task; +import lombok.NoArgsConstructor; import org.springframework.data.annotation.Id; import org.springframework.data.elasticsearch.annotations.DateFormat; import org.springframework.data.elasticsearch.annotations.Document; 
@@ -14,14 +15,10 @@ import static org.springframework.data.elasticsearch.annotations.FieldType.Flattened; import static org.springframework.data.elasticsearch.annotations.FieldType.Keyword; - +@NoArgsConstructor @Document(indexName = "#{@elasticTaskIndex}") public class ElasticTask extends com.netgrif.application.engine.objects.elastic.domain.ElasticTask { - public ElasticTask() { - super(); - } - public ElasticTask(Task task) { super(task); } From 108c0895bda085c6f984d00c5f94e8c40ffba309 Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Wed, 20 Aug 2025 08:25:58 +0200 Subject: [PATCH 15/17] Fix null check for task ID in ElasticTaskQueueManager Updated the null check and logging to use the correct method `getId()` instead of `getTaskId()`. This ensures proper validation and accurate debug logs for scheduled tasks. --- .../engine/elastic/service/ElasticTaskQueueManager.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index 9d8d828781b..a05f364894f 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -35,11 +35,11 @@ public ElasticTaskQueueManager(@Qualifier("elasticTaskExecutor") ThreadPoolTaskE public Future scheduleOperation(ElasticTaskJob task) { - if (task.getTask().getTaskId() == null) { + if (task.getTask().getId() == null) { throw new IllegalArgumentException("Task id cannot be null"); } - String taskId = task.getTaskId(); + String taskId = task.getTask().getId(); log.debug("Scheduling operation for task: {}", taskId); CompletableFuture future = new CompletableFuture<>(); From f141ee927f250a7dfa8946fae7f089200618ab85 Mon Sep 17 
00:00:00 2001 From: renczesstefan Date: Wed, 20 Aug 2025 09:51:47 +0200 Subject: [PATCH 16/17] Refactor task ID handling in ElasticTask operations Replace redundant `getTask().getId()` calls with `getTaskId()` for clarity and simplify task identification. Update related test cases to reflect changes and ensure consistency. --- .../engine/elastic/service/ElasticTaskQueueManager.java | 6 +++--- .../application/engine/elastic/ElasticTaskTest.groovy | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index a05f364894f..0c894b2f9be 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -35,11 +35,11 @@ public ElasticTaskQueueManager(@Qualifier("elasticTaskExecutor") ThreadPoolTaskE public Future scheduleOperation(ElasticTaskJob task) { - if (task.getTask().getId() == null) { + if (task.getTaskId() == null) { throw new IllegalArgumentException("Task id cannot be null"); } - String taskId = task.getTask().getId(); + String taskId = task.getTaskId(); log.debug("Scheduling operation for task: {}", taskId); CompletableFuture future = new CompletableFuture<>(); @@ -128,7 +128,7 @@ private ElasticTask indexTaskWorker(ElasticTask task) { } catch (InvalidDataAccessApiUsageException e) { log.debug("[{}]: Task \"{}\" has duplicates, will be reindexed", task.getCaseId(), task.getTitle()); repository.deleteAllById(task.getId()); - repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); + elasticTask = repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); log.debug("[{}]: Task 
\"{}\" indexed", task.getCaseId(), task.getTitle()); } catch (RuntimeException e) { log.error("Elastic executor was killed before finish: {}", e.getMessage()); diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy index 2e1151b1c87..4295e76552c 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy @@ -127,8 +127,8 @@ class ElasticTaskTest { @Test void testRemoveTaskByProcess() throws Exception { ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setId("TestTask") - task.setTaskId("TestTask") + task.setId("TestTask1") + task.setTaskId("TestTask1") task.setTitle(new I18nString("Test")) task.setProcessId("Process") @@ -136,13 +136,13 @@ class ElasticTaskTest { ElasticTask result = resultFuture.get() assert result - ElasticTask indexedTask = elasticTaskRepository.findByTaskId("TestTask") + ElasticTask indexedTask = elasticTaskRepository.findByTaskId("TestTask1") assert indexedTask != null CountDownLatch latch = new CountDownLatch(1) elasticTaskService.removeByPetriNetId("Process") latch.await(10, TimeUnit.SECONDS) - ElasticTask deletedTask = elasticTaskRepository.findByTaskId("TestTask") + ElasticTask deletedTask = elasticTaskRepository.findByTaskId("TestTask1") assert deletedTask == null } From 30def809ec8ec26c200a1b145077c30c24ccbdcf Mon Sep 17 00:00:00 2001 From: renczesstefan Date: Wed, 20 Aug 2025 14:49:11 +0200 Subject: [PATCH 17/17] Update logging and tests for task indexing in Elastic service Enhanced logging to include task ID during indexing after duplicate cleanup. 
Corrected variable assignments and made minor adjustments in test cases to fix and align task ID consistency for better clarity and reliability. --- .../service/ElasticTaskQueueManager.java | 2 +- .../engine/elastic/ElasticTaskTest.groovy | 27 ++++++++++--------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java index 0c894b2f9be..85a505821bd 100644 --- a/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java +++ b/application-engine/src/main/java/com/netgrif/application/engine/elastic/service/ElasticTaskQueueManager.java @@ -129,7 +129,7 @@ private ElasticTask indexTaskWorker(ElasticTask task) { log.debug("[{}]: Task \"{}\" has duplicates, will be reindexed", task.getCaseId(), task.getTitle()); repository.deleteAllById(task.getId()); elasticTask = repository.save((com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask) task); - log.debug("[{}]: Task \"{}\" indexed", task.getCaseId(), task.getTitle()); + log.debug("[{}]: Task \"{}\" [{}] indexed after duplicate cleanup", task.getCaseId(), task.getTitle(), task.getId()); } catch (RuntimeException e) { log.error("Elastic executor was killed before finish: {}", e.getMessage()); } diff --git a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy index 4295e76552c..5ff6a16f712 100644 --- a/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy +++ b/application-engine/src/test/groovy/com/netgrif/application/engine/elastic/ElasticTaskTest.groovy @@ -127,8 +127,8 @@ class ElasticTaskTest { @Test void testRemoveTaskByProcess() throws 
Exception { ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setId("TestTask1") - task.setTaskId("TestTask1") + task.setId("TestTask") + task.setTaskId("TestTask") task.setTitle(new I18nString("Test")) task.setProcessId("Process") @@ -136,13 +136,13 @@ class ElasticTaskTest { ElasticTask result = resultFuture.get() assert result - ElasticTask indexedTask = elasticTaskRepository.findByTaskId("TestTask1") + ElasticTask indexedTask = elasticTaskRepository.findByTaskId("TestTask") assert indexedTask != null CountDownLatch latch = new CountDownLatch(1) elasticTaskService.removeByPetriNetId("Process") latch.await(10, TimeUnit.SECONDS) - ElasticTask deletedTask = elasticTaskRepository.findByTaskId("TestTask1") + ElasticTask deletedTask = elasticTaskRepository.findByTaskId("TestTask") assert deletedTask == null } @@ -163,9 +163,9 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setId("TestTask" + index) - task.setTaskId("TestTask" + index) - task.setTitle(new I18nString("START" + index)) + taskParallel.setId("TestTask" + index) + taskParallel.setTaskId("TestTask" + index) + taskParallel.setTitle(new I18nString("START" + index)) taskParallel.setProcessId("TestProcess") Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() @@ -206,9 +206,9 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setId("TestTask" + index) - task.setTaskId("TestTask" + index) - task.setTitle(new I18nString("START" + index)) + taskParallel.setId("TestTask" + index) + taskParallel.setTaskId("TestTask" + index) + taskParallel.setTitle(new I18nString("START" + index)) Future resultFuture = 
elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() assert result != null @@ -237,6 +237,7 @@ class ElasticTaskTest { int pocetOpakovani = 1000 ElasticTask task = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() task.setId("TestTask") + task.setTaskId("TestTask") task.setTitle(new I18nString("START")) elasticTaskService.index(task) @@ -248,9 +249,9 @@ class ElasticTaskTest { executorService.submit(() -> { try { ElasticTask taskParallel = new com.netgrif.application.engine.adapter.spring.elastic.domain.ElasticTask() - task.setId("TestTask" + index) - task.setTaskId("TestTask" + index) - task.setTitle(new I18nString("START")) + taskParallel.setId("TestTask" + index) + taskParallel.setTaskId("TestTask" + index) + taskParallel.setTitle(new I18nString("START")) Future resultFuture = elasticTaskService.scheduleTaskIndexing(taskParallel) ElasticTask result = resultFuture.get() assert result != null