diff --git a/pom.xml b/pom.xml index b0e8639c..aa3c19c9 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ 4.0.0 com.iemr.mmu mmu-api - 3.4.0 + 3.4.1 war MMU-API diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index 4e96ba30..dc916f29 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -39,7 +39,6 @@ import com.google.gson.annotations.Expose; import lombok.Data; -import lombok.Data; @Entity @Data diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index f7a55d4a..bc927104 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -98,8 +98,8 @@ public List> getDataForGivenSchemaAndTable(String schema, St return resultSetList; } - public int updateProcessedFlagInVan(String schemaName, String tableName, StringBuilder vanSerialNos, - String autoIncreamentColumn, String user) throws Exception { + public int updateProcessedFlagInVan(String schemaName, String tableName, List vanSerialNos, + String autoIncreamentColumn, String user, String status, String reason) throws Exception { jdbcTemplate = getJdbcTemplate(); String query = ""; @@ -107,16 +107,16 @@ public int updateProcessedFlagInVan(String schemaName, String tableName, StringB if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { query = "UPDATE " + schemaName + "." + tableName - + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + + " SET created_date = ? , processed = ?, SyncedDate = ?, Syncedby = ? , SyncFailureReason = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + String.join(",", vanSerialNos) + ")"; } else { query = "UPDATE " + schemaName + "." + tableName - + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + + " SET CreatedDate = ? , processed = ?, SyncedDate = ?, Syncedby = ? , SyncFailureReason = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + String.join(",", vanSerialNos) + ")"; } Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); - int updatedRows = jdbcTemplate.update(query, syncedDate, syncedDate, user); + int updatedRows = jdbcTemplate.update(query, syncedDate, status, syncedDate, user, reason); return updatedRows; } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java new file mode 100644 index 00000000..b51a536d --- /dev/null +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java @@ -0,0 +1,46 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. 
+* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. +*/ +package com.iemr.mmu.service.dataSyncActivity; + +import lombok.Data; + +@Data +public class SyncResult { + private String schemaName; + private String tableName; + private String vanSerialNo; + private String syncedBy; + private boolean success; + private String reason; // Failure reason if any + + // Constructor + public SyncResult(String schemaName, String tableName, String vanSerialNo, String syncedBy, boolean success, String reason) { + this.schemaName = schemaName; + this.tableName = tableName; + this.vanSerialNo = vanSerialNo; + this.syncedBy = syncedBy; + this.success = success; + this.reason = reason; + } + + // Getters & setters omitted for brevity +} diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 51d8a32b..266d3d28 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; +import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -88,22 +89,15 @@ public class UploadDataToServerImpl implements UploadDataToServer { @Autowired private CookieUtil cookieUtil; - // batch size for data upload - // private static final int BATCH_SIZE = 30; - /** * * @param groupName * @param Authorization * @return */ - // @Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = { - // Exception.class }) public String getDataToSyncToServer(int vanID, String user, String Authorization, String token) throws Exception { - String syncData = null; syncData = syncIntercepter(vanID, user, Authorization, token); - return syncData; } @@ -113,148 +107,281 @@ public String getDataToSyncToServer(int vanID, String user, String Authorization * @return */ public String syncIntercepter(int vanID, String user, String Authorization, String token) throws Exception { - // sync activity trigger - String serverAcknowledgement = startDataSync(vanID, user, Authorization, token); - return serverAcknowledgement; } /** + * Enhanced startDataSync method with table-level and group-level tracking * * @param syncTableDetailsIDs * @param Authorization * @return */ - private String startDataSync(int vanID, String user, String Authorization, String token) throws Exception { String serverAcknowledgement = null; - List> responseStatus = new ArrayList<>(); - boolean isProgress = false; + List> responseStatus = new ArrayList<>(); boolean hasSyncFailed = false; ObjectMapper objectMapper = new ObjectMapper(); + // fetch group masters List dataSyncGroupList = dataSyncGroupsRepo.findByDeleted(false); logger.debug("Fetched DataSyncGroups: {}", objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(dataSyncGroupList)); + for (DataSyncGroups dataSyncGroups : dataSyncGroupList) { int groupId = dataSyncGroups.getSyncTableGroupID(); + String groupName = dataSyncGroups.getSyncTableGroupName(); // Get group name if available + List syncUtilityClassList = 
getVanAndServerColumns(groupId); logger.debug("Fetched SyncUtilityClass for groupId {}: {}", groupId, objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncUtilityClassList)); - List> syncData; - List> syncDataBatch; - Map groupIdStatus = new HashMap<>(); - for (SyncUtilityClass obj : syncUtilityClassList) { - // if (!isProgress) { - // get data from DB to sync to server - syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), obj.getVanColumnName()); - logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); - // System.out.println(new Gson().toJson(syncData)); - if (syncData != null && syncData.size() > 0) { - int dataSize = syncData.size(); - int startIndex = 0; - int fullBatchCount = dataSize / BATCH_SIZE; - int remainder = dataSize % BATCH_SIZE; - - logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", - obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); - - - for (int i = 0; i < fullBatchCount; i++) { - syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - BATCH_SIZE); - serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - Authorization, token); - logger.debug("Server acknowledgement for batch {}: {}", i, serverAcknowledgement); - - if (serverAcknowledgement == null || !serverAcknowledgement.contains("success")) { - logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), - obj.getTableName()); - hasSyncFailed = true; - setResponseStatus(groupIdStatus, groupId, "failed", responseStatus); - break; - } - - startIndex += BATCH_SIZE; - } - - if (!hasSyncFailed && remainder > 0) { - syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - remainder); - serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - Authorization, token); - - if (serverAcknowledgement == null || !serverAcknowledgement.contains("success")) { - logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), - obj.getTableName()); - hasSyncFailed = true; - setResponseStatus(groupIdStatus, groupId, "failed", responseStatus); - break; - } - } - if (!hasSyncFailed) { - logger.info("Data sync completed for schema: {}, table: {}", obj.getSchemaName(), - obj.getTableName()); - setResponseStatus(groupIdStatus, groupId, "completed", responseStatus); - } - } else { - logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); - setResponseStatus(groupIdStatus, groupId, "completed", responseStatus); - } + // Track table-level results for this group + List> tableDetailsList = new ArrayList<>(); + boolean groupHasFailures = false; + + for (SyncUtilityClass obj : syncUtilityClassList) { + String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); + boolean tableHasError = false; + + // get data from DB to sync to server + List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), + obj.getVanColumnName()); + logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); + + if (syncData != null && syncData.size() > 0) { + int dataSize = syncData.size(); + int startIndex = 0; + int fullBatchCount = dataSize / BATCH_SIZE; + int remainder = dataSize % BATCH_SIZE; + + // Track table-level success/failure counts + int totalRecords = dataSize; + int successfulRecords = 0; + int failedRecords = 0; + List tableFailureReasons = new ArrayList<>(); + + logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", + obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); + + // Process full batches + for (int i = 0; i < fullBatchCount && !tableHasError; i++) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + BATCH_SIZE); + + // Updated to handle Map return type + Map syncResult = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + Authorization, token); + + if (syncResult == null) { + logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), + obj.getTableName()); + tableHasError = true; + failedRecords += syncDataBatch.size(); + groupHasFailures = true; + break; + } + + String status = (String) syncResult.get("status"); + int batchSuccessCount = (Integer) syncResult.get("successCount"); + int batchFailCount = (Integer) syncResult.get("failCount"); + @SuppressWarnings("unchecked") + List batchFailureReasons = (List) syncResult.get("failureReasons"); + + successfulRecords += batchSuccessCount; + failedRecords += batchFailCount; + + if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { + tableFailureReasons.addAll(batchFailureReasons); + groupHasFailures = true; + } + + if (status.equals("Sync failed")) { + tableHasError = true; + break; + } + + startIndex += BATCH_SIZE; + } + + if (!tableHasError && remainder > 0) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + remainder); + + Map syncResult = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + Authorization, token); + + if (syncResult == null) { + logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), + obj.getTableName()); + failedRecords += syncDataBatch.size(); + groupHasFailures = true; + } else { + String status = (String) syncResult.get("status"); + int batchSuccessCount = (Integer) syncResult.get("successCount"); + int batchFailCount = (Integer) syncResult.get("failCount"); + @SuppressWarnings("unchecked") + List batchFailureReasons = (List) syncResult.get("failureReasons"); + + successfulRecords += batchSuccessCount; + failedRecords += batchFailCount; + + if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { + tableFailureReasons.addAll(batchFailureReasons); + groupHasFailures = true; + } + + if (status.equals("Sync failed")) { + groupHasFailures = true; + } + } + } + + // Determine table status based on success/failure counts + String tableStatus; + if (successfulRecords == totalRecords && failedRecords == 0) { + tableStatus = 
"success"; + } else if (failedRecords == totalRecords && successfulRecords == 0) { + tableStatus = "failed"; + groupHasFailures = true; + } else if (successfulRecords > 0 && failedRecords > 0) { + tableStatus = "partial"; + } else { + tableStatus = "failed"; // Default to failed if unclear + groupHasFailures = true; + } + + // Create detailed table info with failure reasons + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", tableStatus); + tableDetails.put("totalRecords", totalRecords); + tableDetails.put("successfulRecords", successfulRecords); + tableDetails.put("failedRecords", failedRecords); + + // Add failure reasons only if there are any failures + if (!tableFailureReasons.isEmpty()) { + tableDetails.put("failureReasons", tableFailureReasons); + } + + tableDetailsList.add(tableDetails); + + logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {}, Failure Reasons: {})", + tableKey, tableStatus, successfulRecords, failedRecords, totalRecords, + tableFailureReasons.isEmpty() ? "None" : tableFailureReasons); + + } else { + logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); + + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", "no_data"); + tableDetails.put("totalRecords", 0); + tableDetails.put("successfulRecords", 0); + tableDetails.put("failedRecords", 0); + tableDetailsList.add(tableDetails); + } + + // If this table had critical failures, stop processing this group + if (tableHasError) { + hasSyncFailed = true; + break; + } +} + // Determine overall group status + String groupStatus; + long successTables = tableDetailsList.stream() + .filter(table -> "success".equals(table.get("status")) || "no_data".equals(table.get("status"))) + .count(); + long partialTables = tableDetailsList.stream() + .filter(table -> "partial".equals(table.get("status"))) + .count(); + long failedTables = tableDetailsList.stream() + .filter(table -> "failed".equals(table.get("status"))) + .count(); + + if (failedTables == 0 && partialTables == 0) { + groupStatus = "completed"; + } else if (failedTables > 0 && successTables == 0 && partialTables == 0) { + groupStatus = "failed"; + } else { + groupStatus = "partial"; + } - if (hasSyncFailed) { - // Mark all subsequent groups as "pending" - for (DataSyncGroups remainingGroup : dataSyncGroupList - .subList(dataSyncGroupList.indexOf(dataSyncGroups) + 1, dataSyncGroupList.size())) { - Map pendingGroupIdStatus = new HashMap<>(); - pendingGroupIdStatus.put("groupId", String.valueOf(remainingGroup.getSyncTableGroupID())); - pendingGroupIdStatus.put("status", "pending"); - responseStatus.add(pendingGroupIdStatus); - } - break; + // Create group response + Map groupResponse = new HashMap<>(); + groupResponse.put("syncTableGroupID", groupId); + groupResponse.put("syncTableGroupName", groupName != null ? 
groupName : "Group " + groupId); + groupResponse.put("status", groupStatus); + groupResponse.put("tables", tableDetailsList); + groupResponse.put("summary", Map.of( + "totalTables", tableDetailsList.size(), + "successfulTables", successTables, + "partialTables", partialTables, + "failedTables", failedTables)); + + responseStatus.add(groupResponse); + + if (hasSyncFailed) { + // Mark all subsequent groups as "pending" + for (int j = dataSyncGroupList.indexOf(dataSyncGroups) + 1; j < dataSyncGroupList.size(); j++) { + DataSyncGroups remainingGroup = dataSyncGroupList.get(j); + Map pendingGroupResponse = new HashMap<>(); + pendingGroupResponse.put("syncTableGroupID", remainingGroup.getSyncTableGroupID()); + pendingGroupResponse.put("syncTableGroupName", + remainingGroup.getSyncTableGroupName() != null ? remainingGroup.getSyncTableGroupName() + : "Group " + remainingGroup.getSyncTableGroupID()); + pendingGroupResponse.put("status", "pending"); + pendingGroupResponse.put("tables", new ArrayList<>()); + pendingGroupResponse.put("summary", Map.of( + "totalTables", 0, + "successfulTables", 0L, + "partialTables", 0L, + "failedTables", 0L)); + responseStatus.add(pendingGroupResponse); } + break; } } + // Create final response + Map finalResponse = new HashMap<>(); if (hasSyncFailed) { - Map response = new HashMap<>(); - response.put("response", "Data sync failed"); - response.put("groupsProgress", responseStatus); - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); + finalResponse.put("response", "Data sync failed"); + finalResponse.put("groupsProgress", responseStatus); return objectMapper.writerWithDefaultPrettyPrinter() - .writeValueAsString(Collections.singletonMap("data", response)); + .writeValueAsString(finalResponse); } else { - if ("No data to sync".equals(serverAcknowledgement)) { - return serverAcknowledgement; + // Check if there was any data to sync + boolean hasData = responseStatus.stream() + .anyMatch(group -> { + @SuppressWarnings("unchecked") + List> tables = (List>) ((Map) group) + .get("tables"); + return tables.stream().anyMatch(table -> !("no_data".equals(table.get("status")))); + }); + + if (!hasData) { + return "No data to sync"; } else { - return "Data successfully synced"; + finalResponse.put("response", "Data sync completed"); + finalResponse.put("groupsProgress", responseStatus); + return objectMapper.writerWithDefaultPrettyPrinter() + .writeValueAsString(finalResponse); } } } - private void setResponseStatus(Map groupIdStatus, int groupId, String serverAcknowledgement, - List> responseStatus) { - groupIdStatus.put("groupId", String.valueOf(groupId)); - groupIdStatus.put("status", serverAcknowledgement); - responseStatus.add(groupIdStatus); - } - - /** - * - * @param syncTableDetailsIDs - * @return - */ - private List getVanAndServerColumns(Integer groupID) throws Exception { List syncUtilityClassList = getVanAndServerColumnList(groupID); logger.debug("Fetched SyncUtilityClass list for groupID {}: {}", groupID, syncUtilityClassList); - return syncUtilityClassList; } @@ -265,23 +392,13 @@ public List getVanAndServerColumnList(Integer groupID) throws return syncUtilityClassList; } - /** - * - * @param schemaName - * @param tableName - * @param columnNames - * @return - */ - private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { - logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); + logger.info("Fetching data to sync for schema: {}, table: 
{}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { logger.debug("Fetched {} records for schema '{}', table '{}'", resultSetList.size(), schemaName, tableName); - // Optionally log a sample of the resultSetList for verification (be careful - // with large datasets) if (!resultSetList.isEmpty()) { logger.debug("Sample record: {}", resultSetList.get(0)); } @@ -291,82 +408,121 @@ private List> getDataToSync(String schemaName, String tableN return resultSetList; } - /** - * - * @param syncData - * @param startIndex - * @param size - * @return - */ - private List> getBatchOfAskedSizeDataToSync(List> syncData, int startIndex, int size) throws Exception { List> syncDataOfBatchSize = syncData.subList(startIndex, (startIndex + size)); return syncDataOfBatchSize; } - /** - * - * @param schemaName - * @param tableName - * @param vanAutoIncColumnName - * @param serverColumns - * @param dataToBesync - * @param Authorization - * @return - */ - - public String syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, - String serverColumns, List> dataToBesync, String user, String Authorization, - String token) - throws Exception { - - RestTemplate restTemplate = new RestTemplate(); - - Integer facilityID = masterVanRepo.getFacilityID(vanID); - - // serialize null - GsonBuilder gsonBuilder = new GsonBuilder(); - gsonBuilder.serializeNulls(); - Gson gson = gsonBuilder.create(); - - Map dataMap = new HashMap<>(); - dataMap.put("schemaName", schemaName); - dataMap.put("tableName", tableName); - dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); - dataMap.put("serverColumns", serverColumns); - dataMap.put("syncData", dataToBesync); - dataMap.put("syncedBy", user); - if (facilityID != null) - dataMap.put("facilityID", facilityID); - - String requestOBJ = gson.toJson(dataMap); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); - ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, - String.class); - - int i = 0; - if (response != null && response.hasBody()) { - JSONObject obj = new JSONObject(response.getBody()); - if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { - StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); - - i = dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, vanSerialNos, - vanAutoIncColumnName, user); + public Map syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, + String serverColumns, List> dataToBesync, String user, String Authorization, + String token) throws Exception { + + RestTemplate restTemplate = new RestTemplate(); + Integer facilityID = masterVanRepo.getFacilityID(vanID); + + // serialize null + GsonBuilder gsonBuilder = new GsonBuilder(); + gsonBuilder.serializeNulls(); + Gson gson = gsonBuilder.create(); + + Map dataMap = new HashMap<>(); + dataMap.put("schemaName", schemaName); + dataMap.put("tableName", tableName); + dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); + dataMap.put("serverColumns", serverColumns); + dataMap.put("syncData", dataToBesync); + dataMap.put("syncedBy", user); + if (facilityID != null) + dataMap.put("facilityID", facilityID); + + String requestOBJ = gson.toJson(dataMap); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, 
Authorization, "datasync"); + ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, + String.class); + + logger.info("Response from the server=" + response); + + int successCount = 0; + int failCount = 0; + List successVanSerialNos = new ArrayList<>(); + List failedVanSerialNos = new ArrayList<>(); + List failureReasons = new ArrayList<>(); + + if (response != null && response.hasBody()) { + JSONObject obj = new JSONObject(response.getBody()); + if (obj.has("data")) { + JSONObject dataObj = obj.getJSONObject("data"); + if (dataObj.has("records")) { + JSONArray recordsArr = dataObj.getJSONArray("records"); + for (int i = 0; i < recordsArr.length(); i++) { + JSONObject record = recordsArr.getJSONObject(i); + String vanSerialNo = record.getString("vanSerialNo"); + boolean success = record.getBoolean("success"); + if (success) { + successVanSerialNos.add(vanSerialNo); + successCount++; + } else { + failedVanSerialNos.add(vanSerialNo); + failCount++; + // Capture the failure reason + String reason = record.optString("reason", "Unknown error"); + failureReasons.add(reason); + } + } + } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { + // Handle summary response for m_beneficiaryregidmapping + String respMsg = dataObj.optString("response", ""); + int statusCode = obj.optInt("statusCode", 0); + if (respMsg.toLowerCase().contains("success") && statusCode == 200) { + // All records are successful + for (Map map : dataToBesync) { + successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + successCount = successVanSerialNos.size(); + } else { + // All records failed + for (Map map : dataToBesync) { + failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + failCount = failedVanSerialNos.size(); + // Add a generic reason for all failed records + for (int i = 0; i < failCount; i++) { + failureReasons.add(respMsg.isEmpty() ? 
"Sync failed" : respMsg); + } + } } } - if (i > 0) - return "Data successfully synced"; - else - return null; } - /** - * - * @param vanAutoIncColumnName - * @param dataToBesync - * @return - */ + logger.info("Success Van Serial No=" + successVanSerialNos.toString()); + logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); + + // Update processed flag for success and failed vanSerialNos + if (!successVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, + vanAutoIncColumnName, user, "P","Null"); + } + if (!failedVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, + vanAutoIncColumnName, user, "F",failureReasons.get(0)); + } + + // Return detailed result object instead of just a string + Map result = new HashMap<>(); + if (successCount > 0 && failCount == 0) { + result.put("status", "Data successfully synced"); + } else if (successCount > 0 && failCount > 0) { + result.put("status", "Partial success: " + successCount + " records synced, " + failCount + " failed"); + } else { + result.put("status", "Sync failed"); + } + + result.put("successCount", successCount); + result.put("failCount", failCount); + + return result; +} public StringBuilder getVanSerialNoListForSyncedData(String vanAutoIncColumnName, List> dataToBesync) throws Exception { @@ -392,5 +548,4 @@ public String getDataSyncGroupDetails() { else return null; } - -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index fcc68fd3..e338ba17 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -21,6 +21,7 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; +import java.sql.SQLException; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; @@ -35,6 +36,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; +import com.iemr.mmu.service.dataSyncActivity.SyncResult; @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { @@ -95,25 +97,24 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); List> dataToBesync = syncUploadDataDigester.getSyncData(); - logger.info("Data to be synced: {}", dataToBesync); + List syncResults = new ArrayList<>(); // <-- define here + if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; } String syncTableName = syncUploadDataDigester.getTableName(); - logger.info("Syncing data for table: {}", syncTableName); // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { - String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); + String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester, syncResults); if ("data sync passed".equals(result)) { 
return "Sync successful for m_beneficiaryregidmapping."; } else { logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } - else { + } else { boolean syncSuccess = true; String errorMessage = ""; if (syncTableName != null && !syncTableName.isEmpty()) { @@ -123,7 +124,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E if (map.get("tableName") != null && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, - syncUploadDataDigester); + syncUploadDataDigester, syncResults); foundInGroup = true; break; } @@ -131,7 +132,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E if (!foundInGroup) { logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", syncTableName); - syncSuccess = performGenericTableSync(syncUploadDataDigester); + syncSuccess = performGenericTableSync(syncUploadDataDigester, syncResults); } } else { @@ -142,7 +143,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E try { boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), - table, syncUploadDataDigester); + table, syncUploadDataDigester, syncResults); if (!currentTableSyncResult) { syncSuccess = false; errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; @@ -163,16 +164,19 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } } - if (syncSuccess) { - return "Overall data sync passed."; - } else { - return "Overall data sync failed. Details: " + errorMessage; - } + Map responseMap = new HashMap<>(); + responseMap.put("statusCode", 200); + responseMap.put("message", "Data sync completed"); + responseMap.put("records", syncResults); + logger.info("Response = " + responseMap); + logger.info("Sync Results = " + syncResults); + return new ObjectMapper().writeValueAsString(responseMap); + } } private boolean syncTablesInGroup(String schemaName, String currentTableName, - SyncUploadDataDigester originalDigester) { + SyncUploadDataDigester originalDigester, List syncResults) { SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); @@ -181,11 +185,11 @@ private boolean syncTablesInGroup(String schemaName, String currentTableName, tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); tableSpecificDigester.setSyncData(originalDigester.getSyncData()); - return performGenericTableSync(tableSpecificDigester); + return performGenericTableSync(tableSpecificDigester, syncResults); } private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( - SyncUploadDataDigester syncUploadDataDigester) { + SyncUploadDataDigester syncUploadDataDigester, List syncResults) { List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -297,219 +301,389 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN return queryBuilder.toString(); } - /** - * Handles the generic synchronization logic for tables not covered by specific - * handlers. 
- */ + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, + List syncResults) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); + // Track indices for insert and update operations + Map insertIndexMap = new HashMap<>(); // syncResults index -> insert list index + Map updateIndexMap = new HashMap<>(); // syncResults index -> update list index - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; // Nothing to sync, consider it a success - } + boolean overallSuccess = true; - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); - String serverColumns = syncUploadDataDigester.getServerColumns(); - - List serverColumnsList = Arrays.asList(serverColumns.split(",")); - - for (Map map : dataToBesync) { - // Create a new map with clean column names as keys - Map cleanRecord = new HashMap<>(); - for (String key : map.keySet()) { - String cleanKey = key; - // Handle keys with SQL functions like date_format - if (key.startsWith("date_format(") && key.endsWith(")")) { - int start = key.indexOf("(") + 1; - int end = key.indexOf(","); - if (end > start) { - cleanKey = key.substring(start, end).trim(); - } else { - // Fallback if format is unexpected - cleanKey = key.substring(start, key.indexOf(")")).trim(); + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; + } + + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + cleanKey = key.substring(start, key.indexOf(")")).trim(); + } } + cleanRecord.put(cleanKey.trim(), map.get(key)); } - cleanRecord.put(cleanKey.trim(), map.get(key)); - } - String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); - String vanID = String.valueOf(cleanRecord.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the xmap itself before processing - cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - 
cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { - Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); + String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the cleanRecord + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") + && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_indentissue": { - if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { - Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") + && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_stocktransfer": { - if (cleanRecord.containsKey("TransferToFacilityID") - && cleanRecord.get("TransferToFacilityID") instanceof Number) { - Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_itemstockentry": { - if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { - Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if (mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; + default: + break; } - default: - // No specific facility ID logic for other tables - break; } - } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (cleanRecord.containsKey("SyncFacilityID") && 
cleanRecord.get("SyncFacilityID") instanceof Number) { - syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); - } + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - int recordCheck; - try { - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - return false; // Critical error, stop sync for this table - } + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + logger.info("Record check result: {}", recordCheck); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); - // Prepare Object array for insert/update - List currentRecordValues = new ArrayList<>(); - for (String column : serverColumnsList) { - Object value = cleanRecord.get(column.trim()); - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); - } else { - currentRecordValues.add(null); + // Store the main error reason from record check failure + String mainErrorReason = "Record check failed: " + extractMainErrorReason(e); + + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, mainErrorReason)); + continue; // Skip to next record } - } - Object[] objArr = currentRecordValues.toArray(); - if (recordCheck == 0) { - syncDataListInsert.add(objArr); - } else { - // For update, append the WHERE clause parameters at the end of the array - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); + } + } + + Object[] objArr = currentRecordValues.toArray(); + + // Add to syncResults first, then track the index + int currentSyncResultIndex = syncResults.size(); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), true, null)); // Initially set as success + + if (recordCheck == 0) { + // Record doesn't exist - INSERT + insertIndexMap.put(currentSyncResultIndex, syncDataListInsert.size()); + syncDataListInsert.add(objArr); } else { - 
updateParams.add(String.valueOf(vanID)); + // Record exists - UPDATE + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); + } + + updateIndexMap.put(currentSyncResultIndex, syncDataListUpdate.size()); + syncDataListUpdate.add(updateParams.toArray()); } - syncDataListUpdate.add(updateParams.toArray()); } - } - boolean insertSuccess = true; - boolean updateSuccess = true; + boolean insertSuccess = true; + boolean updateSuccess = true; + + // Process INSERT operations + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + try { + int[] insertResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryInsert, syncDataListInsert); + + // Update syncResults based on insert results + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); + + if (insertListIndex < insertResults.length && insertResults[insertListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully inserted record at index {}", insertListIndex); + } else { + // Failed - update the syncResults entry with concise reason + String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + String conciseReason = "Insert failed (code: " + + (insertListIndex < insertResults.length ? insertResults[insertListIndex] : "unknown") + + ")"; + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, conciseReason)); + insertSuccess = false; + } + } - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - serverColumns, queryInsert, syncDataListInsert); - if (i.length != syncDataListInsert.size()) { + } catch (Exception e) { insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", - syncTableName, syncDataListInsert.size(), i.length, - getFailedRecords(i, syncDataListInsert)); - } else { - logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + + // Store the main error reason instead of complete exception message + String mainErrorReason = extractMainErrorReason(e); + + // Update all insert-related syncResults to failed with concise error message + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); + String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, "INSERT: " + mainErrorReason)); + } } - } catch (Exception e) { - insertSuccess = false; - logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); } - } - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); - try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - if (j.length != syncDataListUpdate.size()) { + // Process UPDATE operations + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + + try { + int[] updateResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryUpdate, syncDataListUpdate); + + // Update syncResults based on update results + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); + + if (updateListIndex < updateResults.length && updateResults[updateListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully updated record at index {}", updateListIndex); + } else { + // Failed - update the syncResults entry with concise reason + String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + String conciseReason = "Update failed (code: " + + (updateListIndex < updateResults.length ? updateResults[updateListIndex] : "unknown") + + ")"; + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, conciseReason)); + updateSuccess = false; + } + } + + } catch (Exception e) { updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", - syncTableName, syncDataListUpdate.size(), j.length, - getFailedRecords(j, syncDataListUpdate)); - } else { - logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + + // Store the main error reason instead of complete exception message + String mainErrorReason = extractMainErrorReason(e); + + // Update all update-related syncResults to failed with concise error message + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); + String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, "UPDATE: " + mainErrorReason)); + } } - } catch (Exception e) { - updateSuccess = false; - logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); } + + logger.info("Sync results for table {}: {}", syncTableName, syncResults); + return insertSuccess && updateSuccess; } - return insertSuccess && updateSuccess; -} - private String getQueryToInsertDataToServerDB(String schemaName, String - tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } + + // Helper method to extract concise but meaningful error message + private String extractMainErrorReason(Exception e) { + if (e == null) { + return "Unknown error"; + } + + String message = e.getMessage(); + if (message == null || message.trim().isEmpty()) { + return e.getClass().getSimpleName(); + } + + // Extract key information based on common error patterns + message = message.trim(); + + // Handle SQL constraint violations - extract the key constraint info + if (message.contains("Duplicate entry") && message.contains("for key")) { + // Extract: "Duplicate entry 'value' for key 'constraint_name'" + int keyStart = message.indexOf("for key '") + 9; + int keyEnd = message.indexOf("'", keyStart); + if (keyStart > 8 && keyEnd > keyStart) { + return "Duplicate key: " + message.substring(keyStart, keyEnd); + } + return "Duplicate entry error"; + } + + // Handle column cannot be null + if (message.contains("cannot be null")) { + int colStart = message.indexOf("Column '") + 8; + int colEnd = message.indexOf("'", colStart); + if (colStart > 7 && colEnd > colStart) { + return "Required field: " + message.substring(colStart, colEnd); + } + return "Required field missing"; + } + + // Handle data too long errors + if (message.contains("Data too long for column")) { + int colStart = message.indexOf("column '") + 8; + int colEnd = message.indexOf("'", colStart); + if (colStart > 7 && colEnd > colStart) { + return "Data too long: " + message.substring(colStart, colEnd); + } + return "Data length exceeded"; + } + + // Handle foreign key constraint violations + if (message.contains("foreign key constraint")) { + if (message.contains("CONSTRAINT `")) { + int constStart = message.indexOf("CONSTRAINT `") + 12; + int constEnd = message.indexOf("`", constStart); + if (constStart > 11 && constEnd 
> constStart) { + return "FK violation: " + message.substring(constStart, constEnd); + } + } + return "Foreign key constraint failed"; + } + + // Handle connection/timeout issues + if (message.toLowerCase().contains("timeout")) { + return "Database connection timeout"; + } + + if (message.toLowerCase().contains("connection")) { + return "Database connection failed"; + } + + // Handle table/schema issues + if (message.contains("doesn't exist")) { + return "Table/schema not found"; + } + + // For other cases, try to get the first meaningful part of the message + // Split by common delimiters and take the first substantial part + String[] parts = message.split("[;:|]"); + for (String part : parts) { + part = part.trim(); + if (part.length() > 10 && part.length() <= 100) { // Reasonable length + return part; + } + } + + // If message is short enough, return it as is + if (message.length() <= 150) { + return message; + } + + // Otherwise, truncate to first 150 characters + return message.substring(0, 150) + "..."; } - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append("("); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(")"); - return queryBuilder.toString(); + private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + + StringBuilder preparedStatementSetter = new StringBuilder(); + + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + } + + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(")"); + logger.info("Test Query Builder: {}", queryBuilder.toString()); + return queryBuilder.toString(); } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { @@ -519,7 +693,17 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol StringBuilder preparedStatementSetter = new StringBuilder(); - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + preparedStatementSetter.append(column).append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + } + + StringBuilder queryBuilder = new StringBuilder("UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); queryBuilder.append(" SET "); queryBuilder.append(preparedStatementSetter); @@ -533,11 +717,13 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol } else { queryBuilder.append(" AND VanID = ? 
"); } + logger.info("Test Query Builder: {}", queryBuilder.toString()); return queryBuilder.toString(); } // Helper to get information about failed records (for logging purposes) private String getFailedRecords(int[] results, List data) { + logger.info("Inside get Failed Records"); List failedRecordsInfo = new ArrayList<>(); for (int k = 0; k < results.length; k++) { // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or @@ -558,5 +744,4 @@ private String getFailedRecords(int[] results, List data) { return String.join("; ", failedRecordsInfo); } - } \ No newline at end of file