From a3a8079320cbcc221f8bf8a87a5c0d97913f96e4 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:45:09 +0530 Subject: [PATCH 01/45] Fix the issue in retrieving the Casesheet Print Data for Cancer Screening (#96) * fix: change the return type to object to get the details * fix: remove commented lines --- pom.xml | 17 +++++++++++------ .../iemr/mmu/repo/nurse/BenVisitDetailRepo.java | 2 +- .../cancerScreening/CSNurseServiceImpl.java | 2 +- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 70859837..eeef30e2 100644 --- a/pom.xml +++ b/pom.xml @@ -163,6 +163,17 @@ jackson-datatype-joda 2.17.0 + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + jakarta.servlet jakarta.servlet-api @@ -179,12 +190,6 @@ org.apache.poi poi-ooxml 5.2.5 - - - org.apache.commons - commons-compress - - diff --git a/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java b/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java index d0902ee0..8c659c8b 100644 --- a/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java +++ b/src/main/java/com/iemr/mmu/repo/nurse/BenVisitDetailRepo.java @@ -68,7 +68,7 @@ public Integer updateBenFlowStatus(@Param("visitFlowStatusFlag") String visitFlo + "bvd.reportFilePath,sp.serviceProviderName from BeneficiaryVisitDetail bvd " + "INNER JOIN bvd.providerServiceMapping p " + "INNER JOIN p.serviceProvider sp " + "WHERE bvd.beneficiaryRegID = :benRegID AND bvd.visitCode = :visitCode ") - public List getBeneficiaryVisitDetails(@Param("benRegID") Long benRegID, + public List getBeneficiaryVisitDetails(@Param("benRegID") Long benRegID, @Param("visitCode") Long visitCode); @Query(" SELECT COUNT(benVisitID) FROM BeneficiaryVisitDetail WHERE beneficiaryRegID = :benRegID GROUP BY beneficiaryRegID ") diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java index 770e81a8..b6b15f22 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java @@ -710,7 +710,7 @@ public Map getBenNurseDataForCaseSheet(Long benRegID, Long visit } public BeneficiaryVisitDetail getBeneficiaryVisitDetails(Long benRegID, Long visitCode) { - List beneficiaryVisitDetail = benVisitDetailRepo.getBeneficiaryVisitDetails(benRegID, visitCode); + List beneficiaryVisitDetail = benVisitDetailRepo.getBeneficiaryVisitDetails(benRegID, visitCode); BeneficiaryVisitDetail beneficiaryVisit = null; if (null != beneficiaryVisitDetail) { for (Object[] obj : beneficiaryVisitDetail) { From 458af672d6c553a79385552dd184aab83beaa6cd Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:49:36 +0530 Subject: [PATCH 02/45] fix: add the column for NumberperWeek to store and fetch the data (#94) --- .../iemr/mmu/data/anc/BenPersonalHabit.java | 35 +++++++++++++++---- .../repo/nurse/anc/BenPersonalHabitRepo.java | 4 +-- .../transaction/CommonNurseServiceImpl.java | 9 +++-- 3 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java b/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java index 17594d3e..078c3208 100644 --- a/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java +++ b/src/main/java/com/iemr/mmu/data/anc/BenPersonalHabit.java @@ 
-93,6 +93,10 @@ public class BenPersonalHabit { @Column(name = "NumberperDay") private Short numberperDay; + @Expose + @Column(name = "NumberperWeek") + private Short numberperWeek; + @Expose @Column(name = "TobaccoUseDuration") private Timestamp tobaccoUseDuration; @@ -333,10 +337,18 @@ public Short getNumberperDay() { return numberperDay; } + public Short getNumberperWeek() { + return numberperWeek; + } + public void setNumberperDay(Short numberperDay) { this.numberperDay = numberperDay; } + public void setNumberperWeek(Short numberperWeek) { + this.numberperWeek = numberperWeek; + } + public Timestamp getTobaccoUseDuration() { return tobaccoUseDuration; } @@ -579,6 +591,9 @@ public ArrayList getPersonalHistory() { if (null != tobaccoInfo.get("numberperDay")) { benPersonalHabit.setNumberperDay(new Short(tobaccoInfo.get("numberperDay"))); } + if (null != tobaccoInfo.get("numberperWeek")) { + benPersonalHabit.setNumberperWeek(new Short(tobaccoInfo.get("numberperWeek"))); + } timePeriodUnit = (String) tobaccoInfo.get("durationUnit"); if (null != tobaccoInfo.get("duration")) { @@ -634,7 +649,7 @@ public ArrayList getPersonalHistory() { } public BenPersonalHabit(Date createdDate, String dietaryType, String physicalActivityType, String tobaccoUseStatus, - String tobaccoUseType, String otherTobaccoUseType, Short numberperDay, Date tobaccoUseDuration, + String tobaccoUseType, String otherTobaccoUseType, Short numberperDay, Short numberperWeek, Date tobaccoUseDuration, Character riskySexualPracticesStatus) { super(); this.captureDate = createdDate; @@ -650,6 +665,8 @@ public BenPersonalHabit(Date createdDate, String dietaryType, String physicalAct } else if(riskySexualPracticesStatus !=null && riskySexualPracticesStatus == '1') { this.riskySexualPracticeStatus = "Yes"; } + this.numberperWeek = numberperWeek; + } public BenPersonalHabit(Date createdDate, String dietaryType, String physicalActivityType, @@ -687,7 +704,7 @@ public BenPersonalHabit(Long beneficiaryRegID, Long benVisitID, Integer provider } public BenPersonalHabit(String tobaccoUseTypeID, String tobaccoUseType, String otherTobaccoUseType, - Short numberperDay, Timestamp tobaccoUseDuration, String alcoholTypeID, String alcoholType, + Short numberperDay, Short numberperWeek, Timestamp tobaccoUseDuration, String alcoholTypeID, String alcoholType, String otherAlcoholType, String alcoholIntakeFrequency, String avgAlcoholConsumption, Timestamp alcoholDuration, Timestamp createdDate, Long visitCode) { super(); @@ -704,6 +721,7 @@ public BenPersonalHabit(String tobaccoUseTypeID, String tobaccoUseType, String o this.alcoholDuration = alcoholDuration; this.createdDate = createdDate; this.visitCode = visitCode; + this.numberperWeek = numberperWeek; } public static BenPersonalHabit getPersonalDetails(ArrayList personalHistoryDetails) { @@ -712,15 +730,15 @@ public static BenPersonalHabit getPersonalDetails(ArrayList personalHi Object[] obj1 = personalHistoryDetails.get(0); personalDetails = new BenPersonalHabit((Long) obj1[0], (Long) obj1[1], (Integer) obj1[2], (String) obj1[3], - (String) obj1[4], (String) obj1[5], (String) obj1[11], (Character) obj1[18]); + (String) obj1[4], (String) obj1[5], (String) obj1[12], (Character) obj1[19]); ArrayList> tobaccoList = new ArrayList>(); ArrayList> alcoholList = new ArrayList>(); for (Object[] obj : personalHistoryDetails) { BenPersonalHabit personalHabits = new BenPersonalHabit((String) obj[6], (String) obj[7], - (String) obj[8], (Short) obj[9], (Timestamp) obj[10], (String) obj[12], (String) obj[13], - 
(String) obj[14], (String) obj[15], (String) obj[16], (Timestamp) obj[17], (Timestamp) obj[19], - (Long) obj[20]); + (String) obj[8], (Short) obj[9], (Short) obj[10], (Timestamp) obj[11], (String) obj[13], (String) obj[14], + (String) obj[15], (String) obj[16], (String) obj[17], (Timestamp) obj[18], (Timestamp) obj[20], + (Long) obj[21]); Map timePeriod = null; // Integer timePeriodAgo = null; @@ -732,7 +750,10 @@ public static BenPersonalHabit getPersonalDetails(ArrayList personalHi if (null != personalHabits.getNumberperDay()) { tobaccoInfo.put("numberperDay", personalHabits.getNumberperDay().toString()); } - + if (null != personalHabits.getNumberperWeek()) { + tobaccoInfo.put("numberperWeek", personalHabits.getNumberperWeek().toString()); + } + timePeriod = Utility.convertTimeToWords(personalHabits.getTobaccoUseDuration(), personalHabits.getCreatedDate()); diff --git a/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java b/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java index 8ad5044f..be3d149d 100644 --- a/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java +++ b/src/main/java/com/iemr/mmu/repo/nurse/anc/BenPersonalHabitRepo.java @@ -39,7 +39,7 @@ public interface BenPersonalHabitRepo extends CrudRepository getBenLastVisitID(@Param("beneficiaryRegID") Long beneficiaryRegID); - @Query("select Date(createdDate), dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseType, otherTobaccoUseType, numberperDay, " + @Query("select Date(createdDate), dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseType, otherTobaccoUseType, numberperDay, numberperWeek, " + "Date(tobaccoUseDuration), riskySexualPracticesStatus from BenPersonalHabit a where a.beneficiaryRegID = :beneficiaryRegID " + "AND tobaccoUseStatus is not null AND deleted = false order by createdDate DESC") public ArrayList getBenPersonalTobaccoHabitDetail(@Param("beneficiaryRegID") Long beneficiaryRegID); @@ -51,7 +51,7 @@ public interface BenPersonalHabitRepo extends CrudRepository getBenPersonalAlcoholHabitDetail(@Param("beneficiaryRegID") Long beneficiaryRegID); @Query(" SELECT beneficiaryRegID, benVisitID, providerServiceMapID, dietaryType, physicalActivityType, tobaccoUseStatus, tobaccoUseTypeID, " - + "tobaccoUseType, otherTobaccoUseType, numberperDay, tobaccoUseDuration, alcoholIntakeStatus, alcoholTypeID, " + + "tobaccoUseType, otherTobaccoUseType, numberperDay, numberperWeek, tobaccoUseDuration, alcoholIntakeStatus, alcoholTypeID, " + "alcoholType, otherAlcoholType, alcoholIntakeFrequency, avgAlcoholConsumption, alcoholDuration, riskySexualPracticesStatus, createdDate, visitCode " + "FROM BenPersonalHabit WHERE beneficiaryRegID = :benRegID AND deleted = false AND visitCode = :visitCode") public ArrayList getBenPersonalHabitDetail(@Param("benRegID") Long benRegID, diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java index 4326df40..0fedb10d 100644 --- a/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonNurseServiceImpl.java @@ -1085,6 +1085,11 @@ public String fetchBenPersonalTobaccoHistory(Long beneficiaryRegID) { column.put("keyName", "numberperDay"); columns.add(column); + column = new HashMap(); + column.put("columnName", "Number Per Week"); + column.put("keyName", "numberperWeek"); + columns.add(column); + column = new HashMap(); 
column.put("columnName", "Tobacco Use Start Date"); column.put("keyName", "tobacco_use_duration"); @@ -1100,8 +1105,8 @@ public String fetchBenPersonalTobaccoHistory(Long beneficiaryRegID) { for (Object[] obj : benPersonalHabits) { BenPersonalHabit benPersonalHabit = new BenPersonalHabit((Date) obj[0], (String) obj[1], - (String) obj[2], (String) obj[3], (String) obj[4], (String) obj[5], (Short) obj[6], - (Date) obj[7], (Character) obj[8]); + (String) obj[2], (String) obj[3], (String) obj[4], (String) obj[5], (Short) obj[6], (Short) obj[7], + (Date) obj[8], (Character) obj[9]); personalHabits.add(benPersonalHabit); } From cd5cffbeb95f7cf4fe12f4ff9832dcf9afc1f639 Mon Sep 17 00:00:00 2001 From: Amoghavarsh <93114621+5Amogh@users.noreply.github.com> Date: Mon, 21 Jul 2025 11:57:09 +0530 Subject: [PATCH 03/45] Update version in pom.xml to 3.4.0 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index eeef30e2..e436c7bb 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ 4.0.0 com.iemr.mmu mmu-api - 3.1.0 + 3.4.0 war MMU-API From 16c39912c602ee2a9c7770a607b3a07f67460b67 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Mon, 21 Jul 2025 14:34:15 +0530 Subject: [PATCH 04/45] chore: add Lombok @Data to BenClinicalObservations (#97) --- .../mmu/data/quickConsultation/BenClinicalObservations.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java b/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java index 3d2941c1..96721ce6 100644 --- a/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java +++ b/src/main/java/com/iemr/mmu/data/quickConsultation/BenClinicalObservations.java @@ -34,7 +34,10 @@ import com.google.gson.annotations.Expose; +import lombok.Data; + @Entity +@Data @Table(name = "t_benclinicalobservation") public class BenClinicalObservations { @Id From b66035cdf1b1da7bdaba74bc8637b748d4a33978 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 25 Jul 2025 14:55:13 +0530 Subject: [PATCH 05/45] fix: add file path in cancer gynecological examination (#98) --- .../mmu/data/doctor/CancerGynecologicalExamination.java | 8 ++++++++ .../iemr/mmu/service/cancerScreening/CSServiceImpl.java | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index fb1ab66f..a63bbc9b 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -33,9 +33,12 @@ import jakarta.persistence.Transient; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.gson.annotations.Expose; +import lombok.Data; @Entity +@Data @Table(name = "t_cancergynecologicalexamination") public class CancerGynecologicalExamination { @Id @@ -92,6 +95,11 @@ public class CancerGynecologicalExamination { @Column(name = "RTIOrSTIDetail") private String rTIOrSTIDetail; + @Expose + @Transient + @JsonProperty("fileIDs") + private List fileIDs; + @Expose @Column(name = "FilePath") private String filePath; diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java 
index c65f38f2..9a97fbcb 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java @@ -977,6 +977,12 @@ public Long saveBenExaminationDetails(JsonObject requestOBJ, Long benVisitID, St .fromJson(examinationOBJ.get("gynecologicalDetails"), CancerGynecologicalExamination.class); cancerGynecologicalExamination.setBenVisitID(benVisitID); cancerGynecologicalExamination.setVisitCode(benVisitCode); + + if (cancerGynecologicalExamination.getFileIDs() != null) { + cancerGynecologicalExamination.setFilePath( + String.join(",", cancerGynecologicalExamination.getFileIDs())); + } + Long ID = cSNurseServiceImpl.saveCancerGynecologicalExaminationData(cancerGynecologicalExamination); if (ID != null && ID > 0) { // gynecologicalDetails stored successfully... From d878d4b1037a8debbe285cfdfbe2d8f393c993fa Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 30 Jul 2025 11:04:36 +0530 Subject: [PATCH 06/45] Fix the data sync issue (#93) * fix: Data Sync batch processing for large data * fix: use parameterized query * fix: revert the updated query * fix: add token if it is missing while calling restTemplate * fix: update the properties * fix: sync group wise * fix: enable logger in pom.xml * fix: CodeRabbit comments * fix: remove logger and replace the license * fix: remove the logs * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: resolve code scanning alert * fix: add comment for code violation * fix: use SyncUploadDataDigester class to load the details * fix: add SyncUploadDataDigester in implementation file too * fix: SonarQube comments --- pom.xml | 14 +- .../MMUDataSyncVanToServer.java | 11 +- .../DataSyncRepositoryCentral.java | 348 +++---- .../GetDataFromVanAndSyncToDBImpl.java | 952 +++++++++++------- .../com/iemr/mmu/utils/RestTemplateUtil.java | 45 +- src/main/resources/application.properties | 7 + 6 files changed, 848 insertions(+), 529 deletions(-) diff --git a/pom.xml b/pom.xml index e436c7bb..b0e8639c 100644 --- a/pom.xml +++ b/pom.xml @@ -47,12 +47,22 @@ org.springframework.boot spring-boot-starter - + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} co.elastic.logging diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java index dedad0eb..eeb54e9f 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java @@ -36,9 +36,11 @@ import com.iemr.mmu.service.dataSyncLayerCentral.FetchDownloadDataImpl; import com.iemr.mmu.service.dataSyncLayerCentral.GetDataFromVanAndSyncToDBImpl; import com.iemr.mmu.service.dataSyncLayerCentral.GetMasterDataFromCentralForVanImpl; +import com.iemr.mmu.utils.CookieUtil; import com.iemr.mmu.utils.response.OutputResponse; import io.swagger.v3.oas.annotations.Operation; +import jakarta.servlet.http.HttpServletRequest; /*** * @operation Class used for data sync from van-to-server & server-to-van @@ -58,10 +60,15 @@ public class MMUDataSyncVanToServer { @Operation(summary = "Sync data from van-to-server") @PostMapping(value = { "/van-to-server" }, consumes = "application/json", produces = "application/json") public String dataSyncToServer(@RequestBody String
requestOBJ, - @RequestHeader(value = "Authorization") String Authorization) { + @RequestHeader(value = "Authorization") String Authorization, HttpServletRequest request) { OutputResponse response = new OutputResponse(); + + logger.info("test: vanto server auth="+Authorization); try { - String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization); + String jwtToken = CookieUtil.getJwtTokenFromCookie(request); + logger.info("test: vanto server token="+jwtToken); + + String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization, jwtToken); if (s != null) response.setResponse(s); else diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index 6b62af69..175de980 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; import javax.sql.DataSource; @@ -34,175 +35,186 @@ import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -/*** - * - * @author NE298657 - * - */ +import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; @Service public class DataSyncRepositoryCentral { - @Autowired - private DataSource dataSource; - - private JdbcTemplate jdbcTemplate; - - private JdbcTemplate getJdbcTemplate() { - return new JdbcTemplate(dataSource); - - } - - private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - - // Data Upload Repository - public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, - String vanAutoIncColumnName, int syncFacilityID) { - jdbcTemplate = getJdbcTemplate(); - - List params = new ArrayList<>(); - - StringBuilder queryBuilder = new StringBuilder("SELECT "); - queryBuilder.append(vanAutoIncColumnName); - queryBuilder.append(" FROM "); - queryBuilder.append(schemaName+"."+tableName); - - //params.add(vanAutoIncColumnName); - //params.add(schemaName); - //params.add(tableName); - - StringBuilder whereClause = new StringBuilder(); - whereClause.append(" WHERE "); - whereClause.append("VanSerialNo = ?"); - params.add(vanSerialNo); - - if ((tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") - || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") - || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") - || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") - || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") - || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) - && syncFacilityID > 0) { - - whereClause.append(" AND "); - whereClause.append("SyncFacilityID = ?"); - params.add(syncFacilityID); - - } - - else { - - whereClause.append(" AND "); - whereClause.append("VanID = ?"); - params.add(vanID); - - } - - queryBuilder.append(whereClause); - String query = queryBuilder.toString(); - Object[] queryParams = params.toArray(); - List> resultSet = jdbcTemplate.queryForList(query, queryParams); - if (resultSet != null && resultSet.size() > 0) - return 1; - else - 
return 0; - } - - // Method for synchronization of data to central DB - public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, - List syncDataList) { - jdbcTemplate = getJdbcTemplate(); - if (query.startsWith("INSERT")) { - for (int i = 0; i < syncDataList.size(); i++) { - Object[] array = syncDataList.get(i);// Arrey 1 - - if (query.startsWith("INSERT")) { -// array = new Object[] {serverColumns, array }; - syncDataList.set(i, array); - } - } - } else { - for (int i = 0; i < syncDataList.size(); i++) { - - Object[] array = syncDataList.get(i);// Arrey 1 - String[] columnsArray = null; - if(null != serverColumns) - columnsArray = serverColumns.split(","); // arrey 2 - - List Newarray = new ArrayList<>(); - - int arrayIndex = 0; - int columnsArrayIndex = 0; - //Newarray.add(schema); - //Newarray.add(tableName); - //while (columnsArrayIndex < columnsArray.length || arrayIndex < array.length) { - if (null != columnsArray && columnsArrayIndex < columnsArray.length) { - Newarray.add(columnsArray[columnsArrayIndex]); - columnsArrayIndex++; - } - - /* - * if (arrayIndex < array.length) { Newarray.add(array); arrayIndex++; } - */ - //} - - // Convert Newarray back to an array - //Object[] resultArray = Newarray.toArray(new Object[0]); - syncDataList.set(i, array); - - } - } - // start batch insert/update - int[] i = jdbcTemplate.batchUpdate(query, syncDataList); - return i; - - } - - // End of Data Upload Repository - - public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { - jdbcTemplate = getJdbcTemplate(); - List> resultSetList =new ArrayList<>(); - String baseQuery = ""; - if (masterType != null) { - if (lastDownloadDate != null) { - if (masterType.equalsIgnoreCase("A")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate); - - } - else if (masterType.equalsIgnoreCase("V")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? AND VanID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,vanID); - } - else if (masterType.equalsIgnoreCase("P")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE LastModDate >= ? AND ProviderServiceMapID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,psmID); - } - } else { - if (masterType.equalsIgnoreCase("A")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table; - resultSetList = jdbcTemplate.queryForList(baseQuery); - } - else if (masterType.equalsIgnoreCase("V")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + " WHERE VanID = ? "; - resultSetList = jdbcTemplate.queryForList(baseQuery,vanID); - } - else if (masterType.equalsIgnoreCase("P")) { - baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE ProviderServiceMapID = ? 
"; - resultSetList = jdbcTemplate.queryForList(baseQuery,psmID); - } - } - } - logger.info("Select query central: " + baseQuery); - logger.info("Last Downloaded Date " + lastDownloadDate); - logger.info("Result set Details: " + resultSetList); - return resultSetList; - } - - // End of Data Download Repository + @Autowired + private DataSource dataSource; + + private JdbcTemplate jdbcTemplate; + + private JdbcTemplate getJdbcTemplate() { + if (this.jdbcTemplate == null) { + this.jdbcTemplate = new JdbcTemplate(dataSource); + } + return this.jdbcTemplate; + } + + private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr"); + + private static final Set VALID_TABLES = Set.of( + "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", + "i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", + "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", + "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", + "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", + "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", + "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", "t_benclinicalobservation", + "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", + "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", + "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", + "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", + "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", + "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", + "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", + "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping" + ); + + private boolean isValidDatabaseIdentifierCharacter(String identifier) { + return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnNamesList(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + for (String col : columnNames.split(",")) { + if (!isValidDatabaseIdentifierCharacter(col.trim())) { + return false; + } + } + return true; + } + + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, + String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); + List params = new ArrayList<>(); + 
+ if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || + !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { + logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, vanAutoIncColumnName); + throw new IllegalArgumentException("Invalid identifiers provided."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ") + .append(vanAutoIncColumnName).append(" FROM ") + .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); + + params.add(vanSerialNo); + + if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit").contains(tableName.toLowerCase()) && syncFacilityID > 0) { + queryBuilder.append(" AND SyncFacilityID = ?"); + params.add(syncFacilityID); + } else { + queryBuilder.append(" AND VanID = ?"); + params.add(vanID); + } + + try { + List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; + } catch (Exception e) { + logger.error("Error checking record presence: {}", e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); + } + } + + public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, + List syncDataList) { + jdbcTemplate = getJdbcTemplate(); + try { + return jdbcTemplate.batchUpdate(query, syncDataList); + } catch (Exception e) { + logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); + throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); + } + } + + public List> getMasterDataFromTable(String schema, String table, String columnNames, + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { + jdbcTemplate = getJdbcTemplate(); + List params = new ArrayList<>(); + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) + .append(" FROM ").append(schema).append(".").append(table); + + if (masterType != null) { + if (lastDownloadDate != null) { + queryBuilder.append(" WHERE LastModDate >= ?"); + params.add(lastDownloadDate); + + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND VanID = ?"); + params.add(vanID); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND ProviderServiceMapID = ?"); + params.add(psmID); + } + } else { + queryBuilder.append(" WHERE "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append("VanID = ?"); + params.add(vanID); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append("ProviderServiceMapID = ?"); + params.add(psmID); + } + } + } + + try { + // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are validated or hardcoded. + // Parameter values are bound safely using prepared statement placeholders (?). 
+ return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + } catch (Exception e) { + logger.error("Error fetching master data: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); + } + } + + public List> getBatchForBenDetails(SyncUploadDataDigester digester, + String whereClause, int limit, int offset) { + jdbcTemplate = getJdbcTemplate(); + +String schema = digester.getSchemaName(); + String table = digester.getTableName(); + String columnNames = digester.getServerColumns(); + + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + // Safe dynamic SQL: Schema, table, and column names are validated against predefined whitelists. + // Only trusted values are used in the query string. + // limit and offset are passed as parameters to prevent SQL injection. + String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? OFFSET ?", columnNames, schema, table, whereClause); //NOSONAR + + try { + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch details: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); + } + } } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index d70404bb..2d88e6f6 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -21,365 +21,625 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; -import java.sql.Timestamp; -import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.time.LocalDateTime; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.HashMap; +import java.util.Set; +import java.util.HashSet; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; -import com.iemr.mmu.utils.mapper.InputMapper; -/*** - * - * @author NE298657 - * - */ @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { - private static final String ServerColumnsNotRequired = null; - @Autowired - private DataSyncRepositoryCentral dataSyncRepositoryCentral; - - public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - - // feed sync request - ObjectMapper mapper = new ObjectMapper(); - SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - /* - * SyncUploadDataDigester syncUploadDataDigester = - * InputMapper.gson().fromJson(requestOBJ, SyncUploadDataDigester.class); - */ - String syncTableName = syncUploadDataDigester.getTableName(); - if (syncUploadDataDigester != null && syncTableName != null - && syncTableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - String s = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); - return s; - } else { - - List> dataToBesync = 
syncUploadDataDigester.getSyncData(); - - Object[] objArr; - - // sync data 'list of object array' - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - - int pointer; - String vanSerialNo; - String vanID; - int recordCheck; - int syncFacilityID = 0; - - for (Map map : dataToBesync) { - pointer = 0; - recordCheck = 0; - vanSerialNo = ""; - vanID = ""; - - vanSerialNo = String.valueOf(map.get(syncUploadDataDigester.getVanAutoIncColumnName())); - vanID = String.valueOf(map.get("VanID")); - - map.replace("SyncedBy", syncUploadDataDigester.getSyncedBy()); - - map.replace("date_format(SyncedDate,'%Y-%m-%d %H:%i:%s')", String.valueOf(LocalDateTime.now())); - - if (syncUploadDataDigester.getFacilityID() != null) { - Double changeDoubleToIntegerID = 0.0; - switch (syncTableName) { - case "t_indent": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("FromFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - case "t_indentorder": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") != null) - changeDoubleToIntegerID = (Double) map.get("FromFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - case "t_indentissue": { - if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("ToFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - // here a change in rule, will compare with toFacilityID - case "t_stocktransfer": { - if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("TransferToFacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - case "t_itemstockentry": { - - if (map.containsKey("FacilityID") && map.get("FacilityID") != null) { - changeDoubleToIntegerID = (Double) map.get("FacilityID"); - if (changeDoubleToIntegerID.intValue() == syncUploadDataDigester.getFacilityID()) - map.replace("Processed", "P"); - } - - } - default: - - } - - } - - if (map.containsKey("SyncFacilityID")) { - //double syncFaciltyID = (double) map.get("SyncFacilityID"); - syncFacilityID = (int) map.get("SyncFacilityID"); - } - - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - syncUploadDataDigester.getSchemaName(), syncUploadDataDigester.getTableName(), vanSerialNo, - vanID, syncUploadDataDigester.getVanAutoIncColumnName(), syncFacilityID); - - if (recordCheck == 0) { - objArr = new Object[map.size()]; - } else { - objArr = new Object[map.size() + 2]; - } - - for (Map.Entry entry : map.entrySet()) { - if (entry.getValue() != null) { - if (String.valueOf(entry.getValue()).equalsIgnoreCase("false") - || String.valueOf(entry.getValue()).equalsIgnoreCase("true")) - objArr[pointer] = entry.getValue(); - else - objArr[pointer] = String.valueOf(entry.getValue()); - } else - objArr[pointer] = entry.getValue(); - - pointer++; - } - - if (recordCheck == 0) { - syncDataListInsert.add(objArr); - } else { - - - objArr[pointer] = String.valueOf(vanSerialNo); - - if ((syncTableName.equalsIgnoreCase("t_patientissue") - || syncTableName.equalsIgnoreCase("t_physicalstockentry") - || 
syncTableName.equalsIgnoreCase("t_stockadjustment") - || syncTableName.equalsIgnoreCase("t_saitemmapping") - || syncTableName.equalsIgnoreCase("t_stocktransfer") - || syncTableName.equalsIgnoreCase("t_patientreturn") - || syncTableName.equalsIgnoreCase("t_facilityconsumption") - || syncTableName.equalsIgnoreCase("t_indent") - || syncTableName.equalsIgnoreCase("t_indentorder") - || syncTableName.equalsIgnoreCase("t_indentissue") - || syncTableName.equalsIgnoreCase("t_itemstockentry") - || syncTableName.equalsIgnoreCase("t_itemstockexit")) - && map.containsKey("SyncFacilityID")) { - - objArr[pointer + 1] = String.valueOf(map.get("SyncFacilityID")); - } else - objArr[pointer + 1] = String.valueOf(vanID); - - syncDataListUpdate.add(objArr); - } - - } - - int[] i = null; - if (syncDataListInsert != null && syncDataListInsert.size() > 0) { - // schema name hard coded(Insert query builder) - String queryInsert = getQueryToInsertDataToServerDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(),syncUploadDataDigester.getServerColumns()); - - // call repository to execute the query with given data list(Insert) - i = dataSyncRepositoryCentral.syncDataToCentralDB( - syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), queryInsert, - syncDataListInsert); - } - - int[] j = null; - if (syncDataListUpdate != null && syncDataListUpdate.size() > 0) { - // schema name hard coded(Update query builder) - String queryUpdate = getQueryToUpdateDataToServerDB(syncUploadDataDigester.getSchemaName(), syncUploadDataDigester.getServerColumns(), - syncUploadDataDigester.getTableName()); - - // call repository to execute the query with given data list(Update) - j = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), ServerColumnsNotRequired, queryUpdate, - syncDataListUpdate); - } - - // validating if data sync successfully - if ((i != null && syncDataListInsert.size() != i.length) - || (j != null && syncDataListUpdate.size() != j.length)) - return null; - else - return "data sync passed"; - - } - - } - - public String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( - SyncUploadDataDigester syncUploadDataDigester) { - String returnOBJ = null; - List> dataToBesync = syncUploadDataDigester.getSyncData(); - - Object[] objArr; - // sync data 'list of object array' - List syncData = new ArrayList<>(); - - String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); - - for (Map map : dataToBesync) { - if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { - objArr = new Object[4]; - objArr[0] = String.valueOf(syncUploadDataDigester.getSyncedBy()); - objArr[1] = String.valueOf(map.get("BenRegId")); - objArr[2] = String.valueOf(map.get("BeneficiaryID")); - objArr[3] = String.valueOf(map.get("VanID")); - - syncData.add(objArr); - } - } - int[] i = null; - - if (syncData != null && syncData.size() > 0) { - i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), ServerColumnsNotRequired, query, syncData); - - if (i.length == syncData.size()) { - returnOBJ = "data sync passed"; - } - } else { - returnOBJ = "data sync passed"; - } - - return returnOBJ; - - } - - private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { - - 
StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); - queryBuilder.append(" WHERE "); - queryBuilder.append(" BenRegId = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" BeneficiaryID = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? "); - String query = queryBuilder.toString(); - return query; - } - - public String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - /// StringBuilder updateStatement = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - int index = 0; - for (String column : columnsArr) { - if (index == columnsArr.length - 1) { - preparedStatementSetter.append(" ? "); - - } else { - preparedStatementSetter.append(" ?, "); - - } - index++; - } - } - /* - * String query = "INSERT INTO " + schemaName + "." + tableName + "( " + - * serverColumns + ") VALUES ( " + preparedStatementSetter + " ) "; - */ - - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName + "." + tableName); - queryBuilder.append("("); -// queryBuilder.append("?"); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(") "); - String query = queryBuilder.toString(); - - return query; - } - - public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - int index = 0; - for (String column : columnsArr) { - if (index == columnsArr.length - 1) { - preparedStatementSetter.append(column); - preparedStatementSetter.append("= ?"); - } else { - preparedStatementSetter.append(column); - preparedStatementSetter.append("= ?, "); - } - index++; - } - } - - if (tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") - || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") - || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") - || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") - || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") - || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) { - - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE "); - queryBuilder.append(" VanSerialNo =? "); - queryBuilder.append(" AND "); - queryBuilder.append(" SyncFacilityID = ? 
"); - String query = queryBuilder.toString(); - return query; - } else { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName+"."+tableName); - queryBuilder.append(" SET "); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE "); - queryBuilder.append(" VanSerialNo =? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? "); - String query = queryBuilder.toString(); - return query; - } - - } + private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity + private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); + + @Autowired + private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + private static final Map> TABLE_GROUPS = new HashMap<>(); + private static final Set VALID_SCHEMAS = new HashSet<>(Arrays.asList("public", "db_iemr")); // Add your actual schema names + private static final Set VALID_TABLES = new HashSet<>(Arrays.asList( + "m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping", + "t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity", + "t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem", + "t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails", + "t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit", + "t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory", + "t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation", + "i_beneficiaryimage", + "t_stockadjustment","t_stocktransfer","t_patientreturn","t_indent","t_indentissue","t_indentorder","t_saitemmapping" + )); + + static { + + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); + + TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); + + TABLE_GROUPS.put(3, Arrays.asList("t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, 
Arrays.asList("t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory")); + + TABLE_GROUPS.put(7, Arrays.asList("t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation")); + + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); + + TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); + + } + + public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { + logger.info("Starting syncDataToServer. Token: {}", token); + ObjectMapper mapper = new ObjectMapper(); + SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); + + if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { + logger.error("Invalid SyncUploadDataDigester object or tableName is null."); + return "Error: Invalid sync request."; + } + + String syncTableName = syncUploadDataDigester.getTableName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); + return "Error: Invalid schema or table name."; + } + + + // Handle specific tables first, if their logic is distinct + if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { + String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); + if ("data sync passed".equals(result)) { + return "Sync successful for m_beneficiaryregidmapping."; + } else { + logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); + return "Sync failed for m_beneficiaryregidmapping."; + } + } + if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); + if ("data sync passed".equals(result)) { + return "Sync successful for i_beneficiarydetails."; + } else { + logger.error("Sync failed for i_beneficiarydetails: {}", result); + return "Sync failed for i_beneficiarydetails."; + } + } else { + // Determine the group for the current table or iterate through all if no specific table is given + boolean syncSuccess = true; + String errorMessage = ""; + + // If a specific table is provided in the request, try to find its group and sync only that table. + // Otherwise, iterate through all defined groups. 
+ if (syncTableName != null && !syncTableName.isEmpty()) { + boolean foundInGroup = false; + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + if (entry.getValue().contains(syncTableName.toLowerCase())) { + logger.info("Attempting to sync table '{}' from Group {}", syncTableName, entry.getKey()); + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); + foundInGroup = true; + break; + } + } + if (!foundInGroup) { + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", syncTableName); + syncSuccess = performGenericTableSync(syncUploadDataDigester); + } + } else { + // If no specific table is in the request (e.g., a general sync trigger), iterate through groups + logger.info("No specific table provided. Attempting to sync all tables group by group."); + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + Integer groupId = entry.getKey(); + List tablesInGroup = entry.getValue(); + logger.info("Starting sync for Group {}", groupId); + for (String table : tablesInGroup) { + if (!isValidTableName(table)) { + logger.error("Invalid table name '{}' encountered in group {}. Skipping.", table, groupId); + syncSuccess = false; + errorMessage += "Invalid table name: " + table + " in Group " + groupId + ". "; + continue; // Skip this table + } + + try { + + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); + if (!currentTableSyncResult) { + syncSuccess = false; + errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); + } else { + logger.info("Successfully synced table: {} in Group {}", table, groupId); + } + } catch (Exception e) { + syncSuccess = false; + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + } + } + } + } + + if (syncSuccess) { + logger.info("Overall data sync passed."); + return "Overall data sync passed."; + } else { + logger.info("Overall data sync failed. Details: " + errorMessage); + return "Overall data sync failed. 
Details: " + errorMessage; + } + } + } + + + private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { + logger.info("Attempting generic sync for table: {}", currentTableName); + + // Validate schemaName and currentTableName for safety before proceeding + if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { + logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); + return false; // Fail fast if identifiers are invalid + } + + SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); + tableSpecificDigester.setSchemaName(schemaName); + tableSpecificDigester.setTableName(currentTableName); + tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); + tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); + tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table + + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching + + return performGenericTableSync(tableSpecificDigester); + } + + + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); + + String schemaName = syncUploadDataDigester.getSchemaName(); + String tableName = syncUploadDataDigester.getTableName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); + return "Error: Invalid schema or table name."; + } + + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncData = new ArrayList<>(); + + String query = String.format("UPDATE %s.%s SET Provisioned = true, SyncedDate = now(), SyncedBy = ? WHERE BenRegId = ? AND BeneficiaryID = ? AND VanID = ?", schemaName, tableName); + + for (Map map : dataToBesync) { + if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { + Object[] objArr = new Object[4]; + objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy + objArr[1] = String.valueOf(map.get("BenRegId")); + objArr[2] = String.valueOf(map.get("BeneficiaryID")); + objArr[3] = String.valueOf(map.get("VanID")); + syncData.add(objArr); + } else { + logger.warn("Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", map); + } + } + + if (!syncData.isEmpty()) { + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + tableName, SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + + if (i.length == syncData.size()) { + logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); + return "data sync passed"; + } else { + logger.error("Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. 
Failed records: {}", syncData.size(), i.length, getFailedRecords(i, syncData)); + return "Partial data sync for m_beneficiaryregidmapping."; + } + } catch (Exception e) { + logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); + return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); + } + } else { + logger.info("No data to sync for m_beneficiaryregidmapping."); + return "data sync passed"; + } + } + + + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); + String schemaName = syncUploadDataDigester.getSchemaName(); + String tableName = syncUploadDataDigester.getTableName(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); + return "Error: Invalid schema or table name."; + } + + List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' + + String updateQuery = getQueryFor_I_BeneficiaryDetails(schemaName, tableName); + + int limit = 1000; + int offset = 0; + int totalProcessed = 0; + + String whereClauseForBatchFetch = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // This is for fetching, not for update + + while (true) { + List> batchToFetch; + try { + batchToFetch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + whereClauseForBatchFetch, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + } + + if (batchToFetch.isEmpty()) { + break; + } + + for (Map map : batchToFetch) { + if (map.get("BeneficiaryDetailsId") != null && map.get("VanID") != null) { + Object[] params = new Object[3]; + params[0] = syncUploadDataDigester.getSyncedBy(); + params[1] = String.valueOf(map.get("BeneficiaryDetailsId")); + params[2] = String.valueOf(map.get("VanID")); + syncData.add(params); + } else { + logger.warn("Skipping record in i_beneficiarydetails due to missing BeneficiaryDetailsId or VanID: {}", map); + } + } + + if (!syncData.isEmpty()) { + try { + int[] batchUpdateResults = dataSyncRepositoryCentral.syncDataToCentralDB( + schemaName, + tableName, + SERVER_COLUMNS_NOT_REQUIRED, + updateQuery, + syncData); + + int successfulUpdates = 0; + for (int result : batchUpdateResults) { + if (result >= 1) { + successfulUpdates++; + } + } + totalProcessed += successfulUpdates; + logger.info("Batch update for i_beneficiarydetails: {} records processed, {} successfully updated.", syncData.size(), successfulUpdates); + + syncData.clear(); + offset += limit; + + } catch (Exception e) { + logger.error("Exception during batch update for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error during sync for i_beneficiarydetails: " + e.getMessage(); + } + } else { + logger.info("No valid records in the current batch for i_beneficiarydetails to update."); + offset += limit; + } + } + + if (totalProcessed > 0) { + logger.info("Finished processing i_beneficiarydetails. 
Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.info("No records were processed for i_beneficiarydetails."); + return "No data processed for i_beneficiarydetails."; + } + } + + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? AND VanID = ?", schemaName, tableName); + } + + + + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { + logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName()); + + String schemaName = syncUploadDataDigester.getSchemaName(); + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); + return false; + } + + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for generic sync: {}", serverColumns); + return false; + } + + + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); + + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; // Nothing to sync, consider it a success + } + + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + + for (Map map : dataToBesync) { + String vanSerialNo = String.valueOf(map.get(vanAutoIncColumnName)); + String vanID = String.valueOf(map.get("VanID")); + int syncFacilityID = 0; + + map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB + + if (facilityIDFromDigester != null) { + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") instanceof Double) { + Double fromFacilityID = (Double) map.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_indentissue": { + if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") instanceof Double) { + Double toFacilityID = (Double) map.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_stocktransfer": { + if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") instanceof Double) { + Double transferToFacilityID = (Double) map.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + case "t_itemstockentry": { + if (map.containsKey("FacilityID") && map.get("FacilityID") instanceof Double) { + Double mapFacilityID = (Double) map.get("FacilityID"); + if 
(mapFacilityID.intValue() == facilityIDFromDigester) { + map.put("Processed", "P"); + } + } + break; + } + default: + // No specific facility ID logic for other tables, maintain existing 'Processed' status or default + break; + } + } + + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Integer) { + syncFacilityID = (Integer) map.get("SyncFacilityID"); + } else if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Double) { + syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue(); + } + + + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); + return false; // Critical error, stop sync for this table + } + + // Prepare Object array for insert/update + Object[] objArr; + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + List currentRecordValues = new ArrayList<>(); + + for (String column : serverColumnsList) { + Object value = map.get(column.trim()); + // Handle boolean conversion if necessary, though String.valueOf should generally work for prepared statements + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); + } + } + + objArr = currentRecordValues.toArray(); + + if (recordCheck == 0) { + syncDataListInsert.add(objArr); + } else { + // For update, append the WHERE clause parameters at the end of the array + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && map.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(map.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); + } + syncDataListUpdate.add(updateParams.toArray()); + } + } + + boolean insertSuccess = true; + boolean updateSuccess = true; + + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + if (i.length != syncDataListInsert.size()) { + insertSuccess = false; + logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); + } else { + logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); + } + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + } + } + + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + try { + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + if (j.length != syncDataListUpdate.size()) { + updateSuccess = false; + logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", syncTableName, syncDataListUpdate.size(), j.length, getFailedRecords(j, syncDataListUpdate)); + } else { + logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); + } + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + } + } + return insertSuccess && updateSuccess; + } + + private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); + throw new IllegalArgumentException("Invalid column names provided."); + } + + + String[] columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); + + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + + return String.format("INSERT INTO %s.%s(%s) VALUES (%s)", schemaName, tableName, serverColumns, preparedStatementSetter.toString()); + } + + public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + throw new IllegalArgumentException("Invalid schema or table name provided."); + } + if (!isValidColumnNames(serverColumns)) { + logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); + throw new IllegalArgumentException("Invalid column names provided."); + } + + String[] columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); + + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + if (!isValidColumnName(column)) { + logger.error("Invalid individual column name encountered: {}", column); + throw new IllegalArgumentException("Invalid individual column name provided: " + column); + } + + preparedStatementSetter.append(column); + preparedStatementSetter.append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + + StringBuilder queryBuilder = new StringBuilder(); + 
queryBuilder.append(String.format("UPDATE %s.%s SET %s WHERE VanSerialNo = ?", schemaName, tableName, preparedStatementSetter.toString())); + + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase())) { + queryBuilder.append(" AND SyncFacilityID = ? "); + } else { + queryBuilder.append(" AND VanID = ? "); + } + return queryBuilder.toString(); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnName(String columnName) { + return columnName != null && columnName.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidColumnNames(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + String[] cols = columnNames.split(","); + for (String col : cols) { + if (!isValidColumnName(col.trim())) { + return false; + } + } + return true; + } + + + private String getFailedRecords(int[] results, List data) { + List failedRecordsInfo = new ArrayList<>(); + for (int k = 0; k < results.length; k++) { + if (results[k] < 1) { + String idInfo = "N/A"; + if (data.get(k) != null && data.get(k).length > 0) { + idInfo = "Record data size: " + data.get(k).length; + } + failedRecordsInfo.add("Record at index " + k + " (Info: " + idInfo + ")"); + } + } + return String.join("; ", failedRecordsInfo); + } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index cf07391c..2cbab41d 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -1,3 +1,24 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. 
+*/ package com.iemr.mmu.utils; import org.slf4j.Logger; @@ -23,16 +44,19 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } - if (jwtToken == null || jwtToken.isEmpty()) { - ServletRequestAttributes attrs = - (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); - if (attrs != null) { - HttpServletRequest request = attrs.getRequest(); - try { - jwtToken = CookieUtil.getJwtTokenFromCookie(request); - } catch (Exception e) { - logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); - } + ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + + if ((jwtToken == null || jwtToken.isEmpty()) && attrs != null) { + HttpServletRequest request = attrs.getRequest(); + try { + jwtToken = CookieUtil.getJwtTokenFromCookie(request); + } catch (Exception e) { + logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); + } + + String jwtTokenHeader = request.getHeader("JwtToken"); + if (jwtTokenHeader != null && !jwtTokenHeader.isEmpty()) { + jwtToken = jwtTokenHeader; } } @@ -41,7 +65,6 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } - return new HttpEntity<>(body, headers); } } \ No newline at end of file diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index d172f4aa..2e6ce84f 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -7,6 +7,8 @@ spring.datasource.tomcat.remove-abandoned=true spring.datasource.tomcat.remove-abandoned-timeout=1800 spring.datasource.tomcat.logAbandoned=true spring.datasource.continue-on-error=true +spring.datasource.tomcat.max-wait=60000 + ## below line added by neeraj for reset abandoned DB connection from connection pool spring.datasource.tomcat.jdbc-interceptors=ResetAbandonedTimer @@ -46,3 +48,8 @@ logging.level.org.springframework=INFO spring.main.allow-circular-references=true spring.main.allow-bean-definition-overriding=true + +spring.datasource.tomcat.testOnBorrow=true +spring.datasource.tomcat.validationQuery=SELECT 1 +spring.datasource.tomcat.validationInterval=30000 # 30 sec +logging.level.org.apache.tomcat.jdbc.pool=DEBUG From 2b7f4c3a8b74a64f1c18469cbbf2af4e12451967 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 30 Jul 2025 16:41:17 +0530 Subject: [PATCH 07/45] fix: add functionality to save the file ID's uploaded from doctor screen (#99) --- .../ncdscreening/NCDScreeningServiceImpl.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java b/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java index a395eec1..9f1018a7 100644 --- a/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/ncdscreening/NCDScreeningServiceImpl.java @@ -1182,6 +1182,30 @@ public Long saveDoctorData(JsonObject requestOBJ, String Authorization) throws E TcSpecialistSlotBookingRequestOBJ tcSpecialistSlotBookingRequestOBJ = null; CommonUtilityClass commonUtilityClass = InputMapper.gson().fromJson(requestOBJ, CommonUtilityClass.class); + if (requestOBJ.has("visitDetails") && !requestOBJ.get("visitDetails").isJsonNull()) { + JsonObject visitWrapperObj = 
requestOBJ.getAsJsonObject("visitDetails"); + JsonObject visitDetailsObj = visitWrapperObj.getAsJsonObject("visitDetails"); + + if (visitDetailsObj.has("fileIDs") && visitDetailsObj.get("fileIDs").isJsonArray()) { + JsonArray fileIDs = visitDetailsObj.getAsJsonArray("fileIDs"); + StringBuilder fileIDBuilder = new StringBuilder(); + for (JsonElement fileIdElement : fileIDs) { + if (!fileIdElement.isJsonNull()) { + fileIDBuilder.append(fileIdElement.getAsString()).append(","); + } + } + + if (fileIDBuilder.length() > 0) { + fileIDBuilder.setLength(fileIDBuilder.length() - 1); + + benVisitDetailRepo.updateFileID( + fileIDBuilder.toString(), + commonUtilityClass.getBeneficiaryRegID(), + commonUtilityClass.getVisitCode() + ); + } + } +} if (commonUtilityClass != null && commonUtilityClass.getServiceID() != null && commonUtilityClass.getServiceID() == 4 && requestOBJ != null && requestOBJ.has("tcRequest") && requestOBJ.get("tcRequest") != null) { From 1aadead0c331f5c0c25f4a718ed72f425dc13bf6 Mon Sep 17 00:00:00 2001 From: 5Amogh Date: Thu, 31 Jul 2025 16:14:58 +0530 Subject: [PATCH 08/45] story: amm-1668 task - 1754 --- .../iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java b/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java index d996f43e..d4adc10e 100644 --- a/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java +++ b/src/main/java/com/iemr/mmu/repo/benFlowStatus/BeneficiaryFlowStatusRepo.java @@ -75,7 +75,7 @@ public int updateBenFlowStatusTMReferred(@Param("benFlowID") Long benFlowID, @Pa @Query("SELECT t.benFlowID, t.beneficiaryRegID, t.visitDate, t.benName, t.age, t.ben_age_val, t.genderID, t.genderName, " + " t.villageName, t.districtName, t.beneficiaryID, t.servicePointName, t.VisitReason, t.VisitCategory, t.benVisitID, " - + " t.registrationDate, t.benVisitDate, t.visitCode, t.consultationDate FROM BeneficiaryFlowStatus t " + + " t.registrationDate, t.benVisitDate, t.visitCode, t.consultationDate, t.fatherName, t.preferredPhoneNum FROM BeneficiaryFlowStatus t " + " Where t.beneficiaryRegID = :benRegID AND t.benFlowID = :benFlowID ") public ArrayList getBenDetailsForLeftSidePanel(@Param("benRegID") Long benRegID, @Param("benFlowID") Long benFlowID); From ea849644e4f6f47a582939cd33f49a5c3a005129 Mon Sep 17 00:00:00 2001 From: Amoghavarsh <93114621+5Amogh@users.noreply.github.com> Date: Fri, 1 Aug 2025 11:55:47 +0530 Subject: [PATCH 09/45] story: amm-1754 updated response including father name and phone no of the beneficiary (#102) * fix: amm-1754 changing the query to get the expected response similar to hwc * fix: amm-1754 compilation error fix * fix: amm-1754 argument issue fix * fix: amm-1754 argument issue fix * fix: amm-1754 argument issue fix --- .../iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java b/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java index 4e32b433..0cce7849 100644 --- a/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java +++ b/src/main/java/com/iemr/mmu/data/benFlowStatus/BeneficiaryFlowStatus.java @@ -324,7 +324,7 @@ public BeneficiaryFlowStatus() { public BeneficiaryFlowStatus(Long benFlowID, Long benRegID, Timestamp visitDate, String benName, String age, Integer ageVal, 
Short genderID, String genderName, String villageName, String districtName, Long beneficiaryID, String servicePoint, String VisitReason, String VisitCategory, Long benVisitID, - Timestamp regDate, Timestamp benVisitDate, Long visitCode, Timestamp consultationDate) { + Timestamp regDate, Timestamp benVisitDate, Long visitCode, Timestamp consultationDate, String fatherName, String preferredPhoneNum) { this.benFlowID = benFlowID; this.beneficiaryRegID = benRegID; this.serviceDate = benVisitDate; @@ -344,6 +344,8 @@ public BeneficiaryFlowStatus(Long benFlowID, Long benRegID, Timestamp visitDate, this.visitCode = visitCode; this.consultationDate = consultationDate; this.bloodGroup = null; + this.fatherName = fatherName; + this.preferredPhoneNum = preferredPhoneNum; } @@ -366,7 +368,8 @@ public static BeneficiaryFlowStatus getBeneficiaryFlowStatusForLeftPanel(ArrayLi (String) objArr[3], (String) objArr[4], (Integer) objArr[5], (Short) objArr[6], (String) objArr[7], (String) objArr[8], (String) objArr[9], (Long) objArr[10], (String) objArr[11], (String) objArr[12], (String) objArr[13], (Long) objArr[14], - (Timestamp) objArr[15], (Timestamp) objArr[16], (Long) objArr[17], (Timestamp) objArr[18]); + (Timestamp) objArr[15], (Timestamp) objArr[16], (Long) objArr[17], (Timestamp) objArr[18], + (String) objArr[19], (String) objArr[20]); } } return obj; From e1d6ece47f903b72b822f6b9699f95a29684982e Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 1 Aug 2025 14:49:24 +0530 Subject: [PATCH 10/45] Save the files uploaded from Doctor Screen (#100) * fix: add file path in cancer gynecological examination * fix: save the files uploaded from the doctor portal * fix: get file names in the response of gynecological examination --- .../CancerGynecologicalExamination.java | 18 ++++++++ .../cancerScreening/CSNurseServiceImpl.java | 43 +++++++++++++++++-- .../cancerScreening/CSServiceImpl.java | 5 +++ 3 files changed, 62 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index a63bbc9b..4e96ba30 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -22,7 +22,9 @@ package com.iemr.mmu.data.doctor; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.List; +import java.util.Map; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -37,6 +39,8 @@ import com.google.gson.annotations.Expose; import lombok.Data; +import lombok.Data; + @Entity @Data @Table(name = "t_cancergynecologicalexamination") @@ -104,6 +108,10 @@ public class CancerGynecologicalExamination { @Column(name = "FilePath") private String filePath; + @Expose + @Transient + private ArrayList> files; + @Expose @Column(name = "ExperiencedPostCoitalBleeding") private Boolean experiencedPostCoitalBleeding; @@ -430,4 +438,14 @@ public void setVisitCode(Long visitCode) { this.visitCode = visitCode; } + + public ArrayList> getFiles() { + return files; + } + + public void setFiles(ArrayList> files) { + this.files = files; + } + + } diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java index b6b15f22..77364bbd 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java +++ 
b/src/main/java/com/iemr/mmu/service/cancerScreening/CSNurseServiceImpl.java @@ -63,10 +63,14 @@ import com.iemr.mmu.repo.nurse.BenPersonalCancerDietHistoryRepo; import com.iemr.mmu.repo.nurse.BenPersonalCancerHistoryRepo; import com.iemr.mmu.repo.nurse.BenVisitDetailRepo; +import com.iemr.mmu.utils.AESEncryption.AESEncryptionDecryption; @Service public class CSNurseServiceImpl implements CSNurseService { private final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); + + @Autowired + private AESEncryptionDecryption aESEncryptionDecryption; private BenFamilyCancerHistoryRepo benFamilyCancerHistoryRepo; private BenPersonalCancerHistoryRepo benPersonalCancerHistoryRepo; private BenPersonalCancerDietHistoryRepo benPersonalCancerDietHistoryRepo; @@ -593,10 +597,41 @@ public CancerBreastExamination getBenCancerBreastExaminationData(Long benRegID, } public CancerGynecologicalExamination getBenCancerGynecologicalExaminationData(Long benRegID, Long visitCode) { - CancerGynecologicalExamination cancerGynecologicalExamination = cancerGynecologicalExaminationRepo - .getBenCancerGynecologicalExaminationDetails(benRegID, visitCode); - return cancerGynecologicalExamination; - } + CancerGynecologicalExamination cancerGynecologicalExamination = cancerGynecologicalExaminationRepo + .getBenCancerGynecologicalExaminationDetails(benRegID, visitCode); + + if (cancerGynecologicalExamination != null) { + + String filePathStr = cancerGynecologicalExamination.getFilePath(); + + if (filePathStr != null && !filePathStr.trim().isEmpty()) { + ArrayList> fileList = new ArrayList<>(); + String[] fileIds = filePathStr.split(","); + + for (String str : fileIds) { + if (str != null && !str.trim().isEmpty()) { + try { + String decryptedFilePath = aESEncryptionDecryption.decrypt(str); // Decrypt + String[] tempArr = decryptedFilePath.split("/"); + String fileName = tempArr[tempArr.length - 1]; + + Map fileMap = new HashMap<>(); + fileMap.put("filePath", str); + fileMap.put("fileName", fileName); + + fileList.add(fileMap); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + cancerGynecologicalExamination.setFiles(fileList); + } + } + + return cancerGynecologicalExamination; +} public CancerSignAndSymptoms getBenCancerSignAndSymptomsData(Long benRegID, Long visitCode) { CancerSignAndSymptoms cancerSignAndSymptoms = cancerSignAndSymptomsRepo diff --git a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java index 9a97fbcb..23fdf9d8 100644 --- a/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/cancerScreening/CSServiceImpl.java @@ -595,6 +595,11 @@ public int updateBenExaminationDetail(JsonObject jsnOBJ) throws Exception { CancerGynecologicalExamination cancerGynecologicalExamination = InputMapper.gson() .fromJson(jsnOBJ.get("gynecologicalDetails"), CancerGynecologicalExamination.class); + if (cancerGynecologicalExamination.getFileIDs() != null) { + cancerGynecologicalExamination.setFilePath( + String.join(",", cancerGynecologicalExamination.getFileIDs())); + } + int ID = cSNurseServiceImpl.updateCancerGynecologicalExaminationDetails(cancerGynecologicalExamination); if (ID > 0) { // gynecologicalDetails stored successfully... 
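The attachment handling added in the patch above round-trips doctor-uploaded file IDs through the single FilePath column: CSServiceImpl joins getFileIDs() with commas before persisting, and CSNurseServiceImpl splits the stored value, decrypts each token and derives a display name from the last path segment. Below is a minimal, self-contained sketch of that round trip; the Decryptor interface is only a stand-in for the project's AESEncryptionDecryption bean, and the '/'-separated decrypted path is an assumption rather than something taken from the patch.

// Sketch only: Decryptor stands in for an AESEncryptionDecryption-style decrypt(String) helper,
// and the decrypted value is assumed to be a '/'-separated file path.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FilePathRoundTripSketch {

    // Save side: collapse the uploaded file IDs into one comma-separated column value.
    static String toFilePathColumn(List<String> fileIDs) {
        return (fileIDs == null || fileIDs.isEmpty()) ? null : String.join(",", fileIDs);
    }

    // Read side: split the stored value and expose {filePath, fileName} pairs for the UI.
    static List<Map<String, String>> toFileList(String filePathColumn, Decryptor decryptor) {
        List<Map<String, String>> files = new ArrayList<>();
        if (filePathColumn == null || filePathColumn.trim().isEmpty()) {
            return files;
        }
        for (String encrypted : filePathColumn.split(",")) {
            if (encrypted.trim().isEmpty()) {
                continue;
            }
            try {
                String decrypted = decryptor.decrypt(encrypted);   // e.g. "2024/07/report.pdf" (illustrative)
                String[] parts = decrypted.split("/");
                Map<String, String> file = new HashMap<>();
                file.put("filePath", encrypted);                   // keep the stored token as-is
                file.put("fileName", parts[parts.length - 1]);     // last path segment as display name
                files.add(file);
            } catch (Exception e) {
                // Skip entries that cannot be decrypted instead of failing the whole case sheet.
            }
        }
        return files;
    }

    interface Decryptor {
        String decrypt(String value) throws Exception;
    }
}

On the save side, String.join also avoids the manual trailing-comma trimming that the StringBuilder approach in patch 07/45 performs with setLength.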
From 690e358b5c49560f60b8b4803fe63a9ca9784301 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 5 Aug 2025 14:29:45 +0530 Subject: [PATCH 11/45] fix: cherry-pick the commits from develop --- .../common/transaction/CommonServiceImpl.java | 2 +- .../com/iemr/mmu/utils/RestTemplateUtil.java | 24 +++++++++---------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java index 3c962bc1..bd4f7cc0 100644 --- a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java @@ -650,7 +650,7 @@ public ArrayList getTmCaseSheet(BeneficiaryFlowStatus TmBenFlowOBJ, Bene headers.add("Cookie", "Jwttoken=" + jwtTokenFromCookie); // get TM case sheet by passing TM details - ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj), jwtTokenFromCookie); + ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj)); if (response.getStatusCodeValue() == 200 & response.hasBody()) { JsonObject jsnOBJ = getJsonObj(response); diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index 2cbab41d..d1ea0efd 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -44,19 +44,16 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } - ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); - - if ((jwtToken == null || jwtToken.isEmpty()) && attrs != null) { - HttpServletRequest request = attrs.getRequest(); - try { - jwtToken = CookieUtil.getJwtTokenFromCookie(request); - } catch (Exception e) { - logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); - } - - String jwtTokenHeader = request.getHeader("JwtToken"); - if (jwtTokenHeader != null && !jwtTokenHeader.isEmpty()) { - jwtToken = jwtTokenHeader; + if (jwtToken == null || jwtToken.isEmpty()) { + ServletRequestAttributes attrs = + (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + if (attrs != null) { + HttpServletRequest request = attrs.getRequest(); + try { + jwtToken = CookieUtil.getJwtTokenFromCookie(request); + } catch (Exception e) { + logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); + } } } @@ -65,6 +62,7 @@ public static HttpEntity createRequestEntity(Object body, String authori headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } + return new HttpEntity<>(body, headers); } } \ No newline at end of file From b384bc5c2f530368c7342ba07f56422b049180bf Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 1 Jul 2025 14:00:33 +0530 Subject: [PATCH 12/45] fix: cherry-pick commits from develop --- .../common/transaction/CommonServiceImpl.java | 2 +- ...wnloadDataFromServerTransactionalImpl.java | 20 +++++++++---------- .../GetMasterDataFromCentralForVanImpl.java | 5 +++++ 3 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java index bd4f7cc0..3c962bc1 100644 --- 
a/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java +++ b/src/main/java/com/iemr/mmu/service/common/transaction/CommonServiceImpl.java @@ -650,7 +650,7 @@ public ArrayList getTmCaseSheet(BeneficiaryFlowStatus TmBenFlowOBJ, Bene headers.add("Cookie", "Jwttoken=" + jwtTokenFromCookie); // get TM case sheet by passing TM details - ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj)); + ResponseEntity response = restTemplatePost(tmCentralServer, Authorization, new Gson().toJson(tmReqObj), jwtTokenFromCookie); if (response.getStatusCodeValue() == 200 & response.hasBody()) { JsonObject jsnOBJ = getJsonObj(response); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java index ee3c39a5..d2441802 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java @@ -87,7 +87,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri for (int i = 0; i < 5; i++) { switch (i) { case 0: { - obj = downloadDataFromCentral("db_iemr", "t_indent", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indent", vanID, ServerAuthorization, token); List ids = new ArrayList(); Indent[] indentArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), Indent[].class, 1); List indentList = Arrays.asList(indentArr); @@ -109,13 +109,13 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } indentRepo.saveAll(indentList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indent", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indent", ids, ServerAuthorization, token); } break; } case 1: { - obj = downloadDataFromCentral("db_iemr", "t_indentorder", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indentorder", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentOrder[] indentOrderArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentOrder[].class, 1); @@ -136,12 +136,12 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri indentOrder.setProcessed("P"); } indentOrderRepo.saveAll(indentOrderList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indentorder", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indentorder", ids, ServerAuthorization, token); } break; } case 2: { - obj = downloadDataFromCentral("db_iemr", "t_indentissue", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_indentissue", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentIssue[] indentIssueArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentIssue[].class, 1); @@ -164,12 +164,12 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } indentIssueRepo.saveAll(indentIssueList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_indentissue", ids, ServerAuthorization, token); + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_indentissue", ids, ServerAuthorization, token); } break; } case 3: { - obj = 
downloadDataFromCentral("db_iemr", "t_stocktransfer", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_stocktransfer", vanID, ServerAuthorization, token); List ids = new ArrayList(); T_StockTransfer[] stockTransferArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), T_StockTransfer[].class, 1); @@ -191,13 +191,13 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } stockTransferRepo.saveAll(stockTransferList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_stocktransfer", ids, + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_stocktransfer", ids, ServerAuthorization, token); } break; } case 4: { - obj = downloadDataFromCentral("db_iemr", "t_itemstockentry", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr_sync", "t_itemstockentry", vanID, ServerAuthorization, token); List ids = new ArrayList(); ItemStockEntry[] itemStockEntryArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), ItemStockEntry[].class, 1); @@ -219,7 +219,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri } itemStockEntryRepo.saveAll(itemStockEntryList); - int updateFlag = updateProcessedFlagToCentral("db_iemr", "t_itemstockentry", ids, + int updateFlag = updateProcessedFlagToCentral("db_iemr_sync", "t_itemstockentry", ids, ServerAuthorization, token); } break; diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java index eaae79c3..6142d257 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java @@ -27,7 +27,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.iemr.mmu.data.syncActivity_syncLayer.SyncDownloadMaster; @@ -36,6 +39,7 @@ public class GetMasterDataFromCentralForVanImpl implements GetMasterDataFromCentralForVan { @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { List> resultSetList = new ArrayList<>(); @@ -56,6 +60,7 @@ public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { } private List> getMasterDataFromGivenTable(SyncDownloadMaster tableDetails) throws Exception { + logger.info("ger master data="+ tableDetails.getSchemaName()); List> resultSetList = new ArrayList<>(); resultSetList = dataSyncRepositoryCentral.getMasterDataFromTable(tableDetails.getSchemaName(), tableDetails.getTableName(), tableDetails.getServerColumnName(), tableDetails.getMasterType(), From 9571a35f77a36b578e41b0b50ff452a839ffb0fe Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Tue, 5 Aug 2025 19:03:02 +0530 Subject: [PATCH 13/45] Fix the Download Masters issue (#103) * fix: resolve the conflicts * fix: fix the issue in download masters table --- ...wnloadDataFromServerTransactionalImpl.java | 10 +- .../DataSyncRepositoryCentralDownload.java | 208 ++++++++++++++++++ 
.../GetMasterDataFromCentralForVanImpl.java | 6 +- 3 files changed, 218 insertions(+), 6 deletions(-) create mode 100644 src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java index d2441802..5cd8d26d 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerTransactionalImpl.java @@ -87,7 +87,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri for (int i = 0; i < 5; i++) { switch (i) { case 0: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indent", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indent", vanID, ServerAuthorization, token); List ids = new ArrayList(); Indent[] indentArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), Indent[].class, 1); List indentList = Arrays.asList(indentArr); @@ -115,7 +115,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 1: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indentorder", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indentorder", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentOrder[] indentOrderArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentOrder[].class, 1); @@ -141,7 +141,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 2: { - obj = downloadDataFromCentral("db_iemr_sync", "t_indentissue", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_indentissue", vanID, ServerAuthorization, token); List ids = new ArrayList(); IndentIssue[] indentIssueArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), IndentIssue[].class, 1); @@ -169,7 +169,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 3: { - obj = downloadDataFromCentral("db_iemr_sync", "t_stocktransfer", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_stocktransfer", vanID, ServerAuthorization, token); List ids = new ArrayList(); T_StockTransfer[] stockTransferArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), T_StockTransfer[].class, 1); @@ -197,7 +197,7 @@ public int downloadTransactionalData(int vanID, String ServerAuthorization, Stri break; } case 4: { - obj = downloadDataFromCentral("db_iemr_sync", "t_itemstockentry", vanID, ServerAuthorization, token); + obj = downloadDataFromCentral("db_iemr", "t_itemstockentry", vanID, ServerAuthorization, token); List ids = new ArrayList(); ItemStockEntry[] itemStockEntryArr = InputMapper.gson(1).fromJson(String.valueOf(obj.get("data")), ItemStockEntry[].class, 1); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java new file mode 100644 index 00000000..565e2466 --- /dev/null +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentralDownload.java @@ -0,0 +1,208 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health 
Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. +*/ +package com.iemr.mmu.service.dataSyncLayerCentral; + +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Service; + +/*** + * + * @author NE298657 + * + */ + +@Service +public class DataSyncRepositoryCentralDownload { + @Autowired + private DataSource dataSource; + + private JdbcTemplate jdbcTemplate; + + private JdbcTemplate getJdbcTemplate() { + return new JdbcTemplate(dataSource); + + } + + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + // Data Upload Repository + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, + String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); + + List params = new ArrayList<>(); + + StringBuilder queryBuilder = new StringBuilder("SELECT "); + queryBuilder.append(vanAutoIncColumnName); + queryBuilder.append(" FROM "); + queryBuilder.append(schemaName+"."+tableName); + + //params.add(vanAutoIncColumnName); + //params.add(schemaName); + //params.add(tableName); + + StringBuilder whereClause = new StringBuilder(); + whereClause.append(" WHERE "); + whereClause.append("VanSerialNo = ?"); + params.add(vanSerialNo); + + if ((tableName.equalsIgnoreCase("t_patientissue") || tableName.equalsIgnoreCase("t_physicalstockentry") + || tableName.equalsIgnoreCase("t_stockadjustment") || tableName.equalsIgnoreCase("t_saitemmapping") + || tableName.equalsIgnoreCase("t_stocktransfer") || tableName.equalsIgnoreCase("t_patientreturn") + || tableName.equalsIgnoreCase("t_facilityconsumption") || tableName.equalsIgnoreCase("t_indent") + || tableName.equalsIgnoreCase("t_indentorder") || tableName.equalsIgnoreCase("t_indentissue") + || tableName.equalsIgnoreCase("t_itemstockentry") || tableName.equalsIgnoreCase("t_itemstockexit")) + && syncFacilityID > 0) { + + whereClause.append(" AND "); + whereClause.append("SyncFacilityID = ?"); + params.add(syncFacilityID); + + } + + else { + + whereClause.append(" AND "); + whereClause.append("VanID = ?"); + params.add(vanID); + + } + + queryBuilder.append(whereClause); + String query = queryBuilder.toString(); + Object[] queryParams = params.toArray(); + List> resultSet = jdbcTemplate.queryForList(query, queryParams); + if (resultSet != null && resultSet.size() > 0) + return 1; + else + return 0; + } + + // Method for synchronization of data to central DB + public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, 
+ List syncDataList) { + jdbcTemplate = getJdbcTemplate(); + if (query.startsWith("INSERT")) { + for (int i = 0; i < syncDataList.size(); i++) { + Object[] array = syncDataList.get(i);// Arrey 1 + + if (query.startsWith("INSERT")) { +// array = new Object[] {serverColumns, array }; + syncDataList.set(i, array); + } + } + } else { + for (int i = 0; i < syncDataList.size(); i++) { + + Object[] array = syncDataList.get(i);// Arrey 1 + String[] columnsArray = null; + if(null != serverColumns) + columnsArray = serverColumns.split(","); // arrey 2 + + List Newarray = new ArrayList<>(); + + int arrayIndex = 0; + int columnsArrayIndex = 0; + //Newarray.add(schema); + //Newarray.add(tableName); + //while (columnsArrayIndex < columnsArray.length || arrayIndex < array.length) { + if (null != columnsArray && columnsArrayIndex < columnsArray.length) { + Newarray.add(columnsArray[columnsArrayIndex]); + columnsArrayIndex++; + } + + /* + * if (arrayIndex < array.length) { Newarray.add(array); arrayIndex++; } + */ + //} + + // Convert Newarray back to an array + //Object[] resultArray = Newarray.toArray(new Object[0]); + syncDataList.set(i, array); + + } + } + // start batch insert/update + int[] i = jdbcTemplate.batchUpdate(query, syncDataList); + return i; + + } + + // End of Data Upload Repository + + public List> getMasterDataFromTable(String schema, String table, String columnNames, + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { + jdbcTemplate = getJdbcTemplate(); + List> resultSetList =new ArrayList<>(); + String baseQuery = ""; + if (masterType != null) { + if (lastDownloadDate != null) { + if (masterType.equalsIgnoreCase("A")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate); + + } + else if (masterType.equalsIgnoreCase("V")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? AND VanID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,vanID); + } + else if (masterType.equalsIgnoreCase("P")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE LastModDate >= ? AND ProviderServiceMapID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,lastDownloadDate,psmID); + } + } else { + if (masterType.equalsIgnoreCase("A")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table; + resultSetList = jdbcTemplate.queryForList(baseQuery); + } + else if (masterType.equalsIgnoreCase("V")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + " WHERE VanID = ? "; + resultSetList = jdbcTemplate.queryForList(baseQuery,vanID); + } + else if (masterType.equalsIgnoreCase("P")) { + baseQuery += " SELECT " + columnNames + " FROM " + schema + "." + table + + " WHERE ProviderServiceMapID = ? 
"; + resultSetList = jdbcTemplate.queryForList(baseQuery,psmID); + } + } + } + logger.info("Select query central: " + baseQuery); + logger.info("Last Downloaded Date " + lastDownloadDate); + logger.info("Result set Details: " + resultSetList); + return resultSetList; + } + + // End of Data Download Repository +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java index 6142d257..b1ed9103 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetMasterDataFromCentralForVanImpl.java @@ -39,6 +39,10 @@ public class GetMasterDataFromCentralForVanImpl implements GetMasterDataFromCentralForVan { @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + @Autowired + private DataSyncRepositoryCentralDownload dataSyncRepositoryCentralDownload; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { @@ -62,7 +66,7 @@ public String getMasterDataForVan(SyncDownloadMaster obj) throws Exception { private List> getMasterDataFromGivenTable(SyncDownloadMaster tableDetails) throws Exception { logger.info("ger master data="+ tableDetails.getSchemaName()); List> resultSetList = new ArrayList<>(); - resultSetList = dataSyncRepositoryCentral.getMasterDataFromTable(tableDetails.getSchemaName(), + resultSetList = dataSyncRepositoryCentralDownload.getMasterDataFromTable(tableDetails.getSchemaName(), tableDetails.getTableName(), tableDetails.getServerColumnName(), tableDetails.getMasterType(), tableDetails.getLastDownloadDate(), tableDetails.getVanID(), tableDetails.getProviderServiceMapID()); return resultSetList; From 3d3ce58c1ca2eb8cdad0b52011d88f6537ae9179 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 6 Aug 2025 13:38:56 +0530 Subject: [PATCH 14/45] fix: remove the validation (#105) --- .../GetDataFromVanAndSyncToDBImpl.java | 96 +++++++++---------- 1 file changed, 48 insertions(+), 48 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 2d88e6f6..5f67b89f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -98,10 +98,10 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t String syncTableName = syncUploadDataDigester.getTableName(); String schemaName = syncUploadDataDigester.getSchemaName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + // logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); + // return "Error: Invalid schema or table name."; + // } // Handle specific tables first, if their logic is distinct @@ -192,10 +192,10 @@ private boolean syncTablesInGroup(String 
schemaName, String currentTableName, Sy logger.info("Attempting generic sync for table: {}", currentTableName); // Validate schemaName and currentTableName for safety before proceeding - if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { - logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); - return false; // Fail fast if identifiers are invalid - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { + // logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); + // return false; // Fail fast if identifiers are invalid + // } SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); @@ -217,10 +217,10 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload String schemaName = syncUploadDataDigester.getSchemaName(); String tableName = syncUploadDataDigester.getTableName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); + // return "Error: Invalid schema or table name."; + // } List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -268,10 +268,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat String schemaName = syncUploadDataDigester.getSchemaName(); String tableName = syncUploadDataDigester.getTableName(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); - return "Error: Invalid schema or table name."; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); + // return "Error: Invalid schema or table name."; + // } List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' @@ -353,10 +353,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat } private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? 
AND VanID = ?", schemaName, tableName); } @@ -370,15 +370,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); String serverColumns = syncUploadDataDigester.getServerColumns(); - if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); - return false; - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { + // logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); + // return false; + // } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for generic sync: {}", serverColumns); - return false; - } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for generic sync: {}", serverColumns); + // return false; + // } List> dataToBesync = syncUploadDataDigester.getSyncData(); @@ -538,14 +538,14 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); - throw new IllegalArgumentException("Invalid column names provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); + // throw new IllegalArgumentException("Invalid column names provided."); + // } String[] columnsArr = serverColumns.split(","); @@ -562,24 +562,24 @@ private String getQueryToInsertDataToServerDB(String schemaName, String tableNam } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - throw new IllegalArgumentException("Invalid schema or table name provided."); - } - if (!isValidColumnNames(serverColumns)) { - logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); - throw new IllegalArgumentException("Invalid column names provided."); - } + // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { + // logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); + // throw new IllegalArgumentException("Invalid schema or table name provided."); + // } + // if (!isValidColumnNames(serverColumns)) { + // logger.error("Invalid server columns provided for 
getQueryToUpdateDataToServerDB: {}", serverColumns); + // throw new IllegalArgumentException("Invalid column names provided."); + // } String[] columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); for (int i = 0; i < columnsArr.length; i++) { String column = columnsArr[i].trim(); - if (!isValidColumnName(column)) { - logger.error("Invalid individual column name encountered: {}", column); - throw new IllegalArgumentException("Invalid individual column name provided: " + column); - } + // if (!isValidColumnName(column)) { + // logger.error("Invalid individual column name encountered: {}", column); + // throw new IllegalArgumentException("Invalid individual column name provided: " + column); + // } preparedStatementSetter.append(column); preparedStatementSetter.append(" = ?"); From 4c85e25b8da1ea3c1be485443dbe3292bfc42dd8 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 6 Aug 2025 19:14:03 +0530 Subject: [PATCH 15/45] fix: replace the old working code (#106) --- .../GetDataFromVanAndSyncToDBImpl.java | 379 +++++++----------- 1 file changed, 144 insertions(+), 235 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 5f67b89f..69be169f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -27,8 +27,6 @@ import java.util.List; import java.util.Map; import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; import org.slf4j.Logger; @@ -50,22 +48,8 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB private DataSyncRepositoryCentral dataSyncRepositoryCentral; private static final Map> TABLE_GROUPS = new HashMap<>(); - private static final Set VALID_SCHEMAS = new HashSet<>(Arrays.asList("public", "db_iemr")); // Add your actual schema names - private static final Set VALID_TABLES = new HashSet<>(Arrays.asList( - "m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping", - "t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity", - "t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem", - "t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails", - "t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit", - "t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory", - 
"t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation", - "i_beneficiaryimage", - "t_stockadjustment","t_stocktransfer","t_patientreturn","t_indent","t_indentissue","t_indentorder","t_saitemmapping" - )); - static { - - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); @@ -87,6 +71,7 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { logger.info("Starting syncDataToServer. Token: {}", token); + ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); @@ -96,13 +81,6 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t } String syncTableName = syncUploadDataDigester.getTableName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - // logger.error("Invalid schema or table name provided: Schema='{}', Table='{}'.", schemaName, syncTableName); - // return "Error: Invalid schema or table name."; - // } - // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { @@ -113,8 +91,7 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } - if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -151,20 +128,22 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t List tablesInGroup = entry.getValue(); logger.info("Starting sync for Group {}", groupId); for (String table : tablesInGroup) { - if (!isValidTableName(table)) { - logger.error("Invalid table name '{}' encountered in group {}. Skipping.", table, groupId); - syncSuccess = false; - errorMessage += "Invalid table name: " + table + " in Group " + groupId + ". "; - continue; // Skip this table - } - try { - + // Create a new digester for each table within the group, + // or adapt if the original digester contains data for multiple tables. + // For simplicity, assuming syncDataDigester needs to be tailored per table or group. 
+ // If your requestOBJ contains data for only one table at a time, this loop might need adjustment + // to fetch data for each table in the group. + // For now, it will use the syncData from the original requestOBJ, which implies + // the original requestOBJ should represent data for a single table. + // A more robust solution would involve fetching data for each table dynamically. boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); if (!currentTableSyncResult) { syncSuccess = false; errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); + // Optionally, you can choose to break here or continue to sync other tables in the group/next group + // For now, let's continue to attempt other tables within the group. } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } @@ -172,31 +151,34 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t syncSuccess = false; errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + // Continue to attempt other tables } } } } if (syncSuccess) { - logger.info("Overall data sync passed."); return "Overall data sync passed."; } else { - logger.info("Overall data sync failed. Details: " + errorMessage); return "Overall data sync failed. Details: " + errorMessage; } } } - + /** + * Helper method to sync tables belonging to a specific group. + * This method assumes that the `syncUploadDataDigester` will be populated + * with relevant data for the `currentTableName` before calling this. + * In a real-world scenario, you might fetch data for each table here. + */ private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { logger.info("Attempting generic sync for table: {}", currentTableName); - - // Validate schemaName and currentTableName for safety before proceeding - // if (!isValidSchemaName(schemaName) || !isValidTableName(currentTableName)) { - // logger.error("Invalid schema or table name for group sync: Schema='{}', Table='{}'.", schemaName, currentTableName); - // return false; // Fail fast if identifiers are invalid - // } + // This is a simplification. In a production system, you would likely need + // to retrieve the actual data for 'currentTableName' from the local DB + // based on the group sync approach. For this example, we'll assume the + // originalDigester's syncData is relevant or needs to be re-populated. + // Create a new digester instance or modify the existing one for the current table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); @@ -205,7 +187,12 @@ private boolean syncTablesInGroup(String schemaName, String currentTableName, Sy tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching + // !!! 
IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. + // The `originalDigester.getSyncData()` might not be correct for all tables in a group. + // For demonstration, I'm just using the original digester's data, which is likely incorrect + // if you're syncing multiple tables from a single request. + // You'll need a method like: dataSyncRepositoryLocal.getDataForTable(currentTableName, ...) + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching return performGenericTableSync(tableSpecificDigester); } @@ -213,19 +200,11 @@ private boolean syncTablesInGroup(String schemaName, String currentTableName, Sy private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); - - String schemaName = syncUploadDataDigester.getSchemaName(); - String tableName = syncUploadDataDigester.getTableName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name provided for m_beneficiaryregidmapping update: Schema='{}', Table='{}'.", schemaName, tableName); - // return "Error: Invalid schema or table name."; - // } - List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); - String query = String.format("UPDATE %s.%s SET Provisioned = true, SyncedDate = now(), SyncedBy = ? WHERE BenRegId = ? AND BeneficiaryID = ? AND VanID = ?", schemaName, tableName); + String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); for (Map map : dataToBesync) { if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { @@ -242,8 +221,8 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload if (!syncData.isEmpty()) { try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - tableName, SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), SERVER_COLUMNS_NOT_REQUIRED, query, syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); @@ -262,125 +241,76 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload } } - + private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); + queryBuilder.append(" WHERE "); + queryBuilder.append(" BenRegId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" BeneficiaryID = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" VanID = ? 
"); + return queryBuilder.toString(); + } + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); - String schemaName = syncUploadDataDigester.getSchemaName(); - String tableName = syncUploadDataDigester.getTableName(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name provided for i_beneficiarydetails update: Schema='{}', Table='{}'.", schemaName, tableName); - // return "Error: Invalid schema or table name."; - // } - - List syncData = new ArrayList<>(); // This list will hold data for batch updates to 'Processed' - - String updateQuery = getQueryFor_I_BeneficiaryDetails(schemaName, tableName); - - int limit = 1000; - int offset = 0; - int totalProcessed = 0; - - String whereClauseForBatchFetch = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // This is for fetching, not for update - - while (true) { - List> batchToFetch; - try { - batchToFetch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - whereClauseForBatchFetch, - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); - } - - if (batchToFetch.isEmpty()) { - break; - } - - for (Map map : batchToFetch) { - if (map.get("BeneficiaryDetailsId") != null && map.get("VanID") != null) { - Object[] params = new Object[3]; - params[0] = syncUploadDataDigester.getSyncedBy(); - params[1] = String.valueOf(map.get("BeneficiaryDetailsId")); - params[2] = String.valueOf(map.get("VanID")); - syncData.add(params); - } else { - logger.warn("Skipping record in i_beneficiarydetails due to missing BeneficiaryDetailsId or VanID: {}", map); - } - } - - if (!syncData.isEmpty()) { - try { - int[] batchUpdateResults = dataSyncRepositoryCentral.syncDataToCentralDB( - schemaName, - tableName, - SERVER_COLUMNS_NOT_REQUIRED, - updateQuery, - syncData); - - int successfulUpdates = 0; - for (int result : batchUpdateResults) { - if (result >= 1) { - successfulUpdates++; - } - } - totalProcessed += successfulUpdates; - logger.info("Batch update for i_beneficiarydetails: {} records processed, {} successfully updated.", syncData.size(), successfulUpdates); - - syncData.clear(); - offset += limit; - - } catch (Exception e) { - logger.error("Exception during batch update for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error during sync for i_beneficiarydetails: " + e.getMessage(); - } - } else { - logger.info("No valid records in the current batch for i_beneficiarydetails to update."); - offset += limit; - } + List syncData = new ArrayList<>(); + + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); + + int limit = 1000; + int offset = 0; + int totalProcessed = 0; + + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + + while (true) { + List> batch; + try { + // *** ADD THIS LINE *** + logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); + + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for 
i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); } - - if (totalProcessed > 0) { + + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a "success" logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); return "data sync passed"; } else { - logger.info("No records were processed for i_beneficiarydetails."); - return "No data processed for i_beneficiarydetails."; + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; } } - + } private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryFor_I_BeneficiaryDetails: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - return String.format("UPDATE %s.%s SET Processed = 'P', SyncedDate = now(), SyncedBy = ? WHERE BeneficiaryDetailsId = ? AND VanID = ?", schemaName, tableName); + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); + queryBuilder.append(" WHERE "); + queryBuilder.append("BeneficiaryDetailsId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append("VanID = ? "); + return queryBuilder.toString(); } - + /** + * Handles the generic synchronization logic for tables not covered by specific handlers. 
+ */ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName()); - - String schemaName = syncUploadDataDigester.getSchemaName(); - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String serverColumns = syncUploadDataDigester.getServerColumns(); - - // if (!isValidSchemaName(schemaName) || !isValidTableName(syncTableName)) { - // logger.error("Invalid schema or table name for generic sync: Schema='{}', Table='{}'.", schemaName, syncTableName); - // return false; - // } - - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for generic sync: {}", serverColumns); - // return false; - // } - - List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncDataListInsert = new ArrayList<>(); List syncDataListUpdate = new ArrayList<>(); @@ -390,6 +320,9 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig return true; // Nothing to sync, consider it a success } + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); for (Map map : dataToBesync) { @@ -397,10 +330,13 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanID = String.valueOf(map.get("VanID")); int syncFacilityID = 0; + // Update SyncedBy and SyncedDate in the map itself before processing map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB + // Facility ID processing if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables switch (syncTableName.toLowerCase()) { case "t_indent": case "t_indentorder": { @@ -464,7 +400,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig // Prepare Object array for insert/update Object[] objArr; - List serverColumnsList = Arrays.asList(serverColumns.split(",")); + List serverColumnsList = Arrays.asList(syncUploadDataDigester.getServerColumns().split(",")); List currentRecordValues = new ArrayList<>(); for (String column : serverColumnsList) { @@ -504,9 +440,9 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig boolean updateSuccess = true; if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); if (i.length != syncDataListInsert.size()) { insertSuccess = false; logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); @@ -520,7 +456,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); try { int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); if (j.length != syncDataListUpdate.size()) { @@ -538,59 +474,53 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryToInsertDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for getQueryToInsertDataToServerDB: {}", serverColumns); - // throw new IllegalArgumentException("Invalid column names provided."); - // } + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); - - String[] columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } } } - return String.format("INSERT INTO %s.%s(%s) VALUES (%s)", schemaName, tableName, serverColumns, preparedStatementSetter.toString()); + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(")"); + return queryBuilder.toString(); } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - // if (!isValidSchemaName(schemaName) || !isValidTableName(tableName)) { - // logger.error("Invalid schema or table name for getQueryToUpdateDataToServerDB: Schema='{}', Table='{}'.", schemaName, tableName); - // throw new IllegalArgumentException("Invalid schema or table name provided."); - // } - // if (!isValidColumnNames(serverColumns)) { - // logger.error("Invalid server columns provided for getQueryToUpdateDataToServerDB: {}", serverColumns); - // throw new IllegalArgumentException("Invalid column names provided."); - // } - - String[] columnsArr = serverColumns.split(","); + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + StringBuilder preparedStatementSetter = new StringBuilder(); - for (int i = 0; i < columnsArr.length; i++) { - String column = columnsArr[i].trim(); - // if (!isValidColumnName(column)) { - // logger.error("Invalid individual column name encountered: {}", 
column); - // throw new IllegalArgumentException("Invalid individual column name provided: " + column); - // } - - preparedStatementSetter.append(column); - preparedStatementSetter.append(" = ?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append(columnsArr[i].trim()); + preparedStatementSetter.append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } } } - StringBuilder queryBuilder = new StringBuilder(); - queryBuilder.append(String.format("UPDATE %s.%s SET %s WHERE VanSerialNo = ?", schemaName, tableName, preparedStatementSetter.toString())); - + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(" WHERE VanSerialNo = ? "); if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", @@ -603,41 +533,20 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol return queryBuilder.toString(); } - private boolean isValidSchemaName(String schemaName) { - return VALID_SCHEMAS.contains(schemaName.toLowerCase()); - } - - private boolean isValidTableName(String tableName) { - return VALID_TABLES.contains(tableName.toLowerCase()); - } - - private boolean isValidColumnName(String columnName) { - return columnName != null && columnName.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); - } - - private boolean isValidColumnNames(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - String[] cols = columnNames.split(","); - for (String col : cols) { - if (!isValidColumnName(col.trim())) { - return false; - } - } - return true; - } - - + // Helper to get information about failed records (for logging purposes) private String getFailedRecords(int[] results, List data) { List failedRecordsInfo = new ArrayList<>(); for (int k = 0; k < results.length; k++) { - if (results[k] < 1) { - String idInfo = "N/A"; - if (data.get(k) != null && data.get(k).length > 0) { - idInfo = "Record data size: " + data.get(k).length; + // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or Statement.SUCCESS_NO_INFO + // usually indicates a failure or success without specific row count. + // A common return value for success is 1 (for one row updated/inserted). + if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure + // Attempt to get some identifiable info from the failed record + if (data.get(k).length > 0) { + failedRecordsInfo.add("Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + } else { + failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); } - failedRecordsInfo.add("Record at index " + k + " (Info: " + idInfo + ")"); } } return String.join("; ", failedRecordsInfo); From 9c97e6d9e27545f330ea94858bb4e6e8738f4610 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 8 Aug 2025 09:13:31 +0530 Subject: [PATCH 16/45] Fix the datasync upload issue (#107) * fix: add the schemas * fix: remove logger * fix: revert the old code for repository --- .../dataSyncActivity/DataSyncRepository.java | 7 +- .../UploadDataToServerImpl.java | 1 + .../DataSyncRepositoryCentral.java | 234 +++++++++--------- .../GetDataFromVanAndSyncToDBImpl.java | 24 +- 4 files changed, 134 insertions(+), 132 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index 52fcf94a..b039a47f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -64,7 +64,7 @@ public List> getDataForGivenSchemaAndTable(String schema, St if (table != null && table.equalsIgnoreCase("m_beneficiaryregidmapping")) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE provisioned is true AND processed != 'P' AND vanID is not null "; + + " WHERE provisioned is true AND processed <> 'P' AND vanID is not null "; } else { if (table != null && (table.equalsIgnoreCase("t_patientissue") || table.equalsIgnoreCase("t_physicalstockentry") || table.equalsIgnoreCase("t_stockadjustment") @@ -75,15 +75,14 @@ public List> getDataForGivenSchemaAndTable(String schema, St || table.equalsIgnoreCase("t_itemstockexit"))) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed != 'P' AND SyncFacilityID is not null "; + + " WHERE processed <> 'P' AND SyncFacilityID is not null "; } else { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." 
+ table - + " WHERE processed != 'P' AND vanID is not null "; + + " WHERE processed <> 'P' AND vanID is not null "; } } - resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index b65fe6e5..9476ecc7 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -299,6 +299,7 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { + logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index 175de980..bdba82d0 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -21,21 +21,19 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.sql.DataSource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.sql.Statement; // Import Statement for batchUpdate result interpretation +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; @Service public class DataSyncRepositoryCentral { @@ -44,6 +42,7 @@ public class DataSyncRepositoryCentral { private JdbcTemplate jdbcTemplate; + // Lazily initialize jdbcTemplate to ensure DataSource is available private JdbcTemplate getJdbcTemplate() { if (this.jdbcTemplate == null) { this.jdbcTemplate = new JdbcTemplate(dataSource); @@ -51,170 +50,161 @@ private JdbcTemplate getJdbcTemplate() { return this.jdbcTemplate; } - private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - - private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr"); - - private static final Set VALID_TABLES = Set.of( - "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", - "i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", - "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", - "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", - "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", - "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", - "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", 
"t_benchefcomplaint", "t_benclinicalobservation", - "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", - "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", - "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", - "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", - "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", - "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", - "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", - "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", - "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", - "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", - "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping" - ); - - private boolean isValidDatabaseIdentifierCharacter(String identifier) { - return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); - } - - private boolean isValidSchemaName(String schemaName) { - return VALID_SCHEMAS.contains(schemaName.toLowerCase()); - } - - private boolean isValidTableName(String tableName) { - return VALID_TABLES.contains(tableName.toLowerCase()); - } - - private boolean isValidColumnNamesList(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - for (String col : columnNames.split(",")) { - if (!isValidDatabaseIdentifierCharacter(col.trim())) { - return false; - } - } - return true; - } + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + // Data Upload Repository public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, String vanAutoIncColumnName, int syncFacilityID) { jdbcTemplate = getJdbcTemplate(); - List params = new ArrayList<>(); - if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || - !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { - logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, vanAutoIncColumnName); - throw new IllegalArgumentException("Invalid identifiers provided."); - } + List params = new ArrayList<>(); - StringBuilder queryBuilder = new StringBuilder("SELECT ") - .append(vanAutoIncColumnName).append(" FROM ") - .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); + StringBuilder queryBuilder = new StringBuilder("SELECT "); + queryBuilder.append(vanAutoIncColumnName); + queryBuilder.append(" FROM "); + queryBuilder.append(schemaName).append(".").append(tableName); + StringBuilder whereClause = new StringBuilder(); + whereClause.append(" WHERE "); + whereClause.append("VanSerialNo = ?"); params.add(vanSerialNo); - if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit").contains(tableName.toLowerCase()) && syncFacilityID > 0) { - queryBuilder.append(" AND SyncFacilityID = ?"); + 
"t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase()) && syncFacilityID > 0) { + + whereClause.append(" AND "); + whereClause.append("SyncFacilityID = ?"); params.add(syncFacilityID); + } else { - queryBuilder.append(" AND VanID = ?"); + whereClause.append(" AND "); + whereClause.append("VanID = ?"); params.add(vanID); } + queryBuilder.append(whereClause); + String query = queryBuilder.toString(); + Object[] queryParams = params.toArray(); + + logger.debug("Checking record existence query: {} with params: {}", query, Arrays.toString(queryParams)); + System.out.println("Checking record existence query: " + query + " with params: " + Arrays.toString(queryParams)); + try { - List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); - return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; + List> resultSet = jdbcTemplate.queryForList(query, queryParams); + if (resultSet != null && !resultSet.isEmpty()) { + System.out.println("Record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("Record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 1; + } else { + System.out.println("No record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("No record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 0; + } + } catch (org.springframework.dao.EmptyResultDataAccessException e) { + System.out.println("No record found (EmptyResultDataAccessException) for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.debug("No record found (EmptyResultDataAccessException) for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); + return 0; } catch (Exception e) { - logger.error("Error checking record presence: {}", e.getMessage(), e); - throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); + System.out.println("Database error during checkRecordIsAlreadyPresentOrNot for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); + logger.error("Database error during checkRecordIsAlreadyPresentOrNot for table {}: VanSerialNo={}, VanID={}. Error: {}", tableName, vanSerialNo, vanID, e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); // Re-throw or handle as appropriate } } + // Method for synchronization of data to central DB public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, List syncDataList) { jdbcTemplate = getJdbcTemplate(); + logger.info("Executing batch operation for table: {}. Query type: {}. Number of records: {}", tableName, query.startsWith("INSERT") ? "INSERT" : "UPDATE", syncDataList.size()); + logger.debug("Query: {}", query); +System.out.println("Executing batch operation for table: " + tableName + ". Query type: " + (query.startsWith("INSERT") ? "INSERT" : "UPDATE") + ". Number of records: " + syncDataList.size()); try { - return jdbcTemplate.batchUpdate(query, syncDataList); + // Start batch insert/update + int[] i = jdbcTemplate.batchUpdate(query, syncDataList); + System.out.println("Batch operation completed for table " + tableName + ". Results: " + Arrays.toString(i)); + logger.info("Batch operation completed for table {}. 
Results: {}", tableName, Arrays.toString(i)); + return i; } catch (Exception e) { - logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); - throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); + logger.error("Exception during batch update for table {}: {}", tableName, e.getMessage(), e); + System.out.println("Exception during batch update for table " + tableName + ": " + e.getMessage()); + // Log the error with detailed information + // Re-throw the exception to be handled by the service layer, so specific errors can be captured. + throw new RuntimeException("Batch sync failed for table " + tableName + ": " + e.getMessage(), e); } } + // End of Data Upload Repository + public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { jdbcTemplate = getJdbcTemplate(); + List> resultSetList = new ArrayList<>(); + StringBuilder baseQueryBuilder = new StringBuilder(" SELECT ").append(columnNames).append(" FROM ").append(schema).append(".").append(table); List params = new ArrayList<>(); - if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { - throw new IllegalArgumentException("Invalid schema, table, or column names."); - } - - StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) - .append(" FROM ").append(schema).append(".").append(table); - if (masterType != null) { if (lastDownloadDate != null) { - queryBuilder.append(" WHERE LastModDate >= ?"); + baseQueryBuilder.append(" WHERE LastModDate >= ? "); params.add(lastDownloadDate); - if ("V".equalsIgnoreCase(masterType)) { - queryBuilder.append(" AND VanID = ?"); + if (masterType.equalsIgnoreCase("V")) { + baseQueryBuilder.append(" AND VanID = ? "); params.add(vanID); - } else if ("P".equalsIgnoreCase(masterType)) { - queryBuilder.append(" AND ProviderServiceMapID = ?"); + } else if (masterType.equalsIgnoreCase("P")) { + baseQueryBuilder.append(" AND ProviderServiceMapID = ? "); params.add(psmID); } } else { - queryBuilder.append(" WHERE "); - if ("V".equalsIgnoreCase(masterType)) { - queryBuilder.append("VanID = ?"); + if (masterType.equalsIgnoreCase("V")) { + baseQueryBuilder.append(" WHERE VanID = ? "); params.add(vanID); - } else if ("P".equalsIgnoreCase(masterType)) { - queryBuilder.append("ProviderServiceMapID = ?"); + } else if (masterType.equalsIgnoreCase("P")) { + baseQueryBuilder.append(" WHERE ProviderServiceMapID = ? "); params.add(psmID); } } } + String finalQuery = baseQueryBuilder.toString(); + logger.info("Select query central: {}", finalQuery); + logger.info("Last Downloaded Date: {}", lastDownloadDate); + logger.info("Query Params: {}", params); + System.out.println("Select query central: " + finalQuery); + System.out.println("Last Downloaded Date: " + lastDownloadDate); + System.out.println("Query Params: " + params); + try { - // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are validated or hardcoded. - // Parameter values are bound safely using prepared statement placeholders (?). 
- return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + if (params.isEmpty()) { + resultSetList = jdbcTemplate.queryForList(finalQuery); + } else { + resultSetList = jdbcTemplate.queryForList(finalQuery, params.toArray()); + } } catch (Exception e) { - logger.error("Error fetching master data: {}", e.getMessage(), e); + System.out.println("Error fetching master data from table " + table + ": " + e.getMessage()); + logger.error("Error fetching master data from table {}: {}", table, e.getMessage(), e); throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); } +System.out.println("Result set Details size: " + resultSetList.size()); + logger.info("Result set Details size: {}", resultSetList.size()); + return resultSetList; } - public List> getBatchForBenDetails(SyncUploadDataDigester digester, + public List> getBatchForBenDetails(String schema, String table, String columnNames, String whereClause, int limit, int offset) { - jdbcTemplate = getJdbcTemplate(); - -String schema = digester.getSchemaName(); - String table = digester.getTableName(); - String columnNames = digester.getServerColumns(); - - - if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { - throw new IllegalArgumentException("Invalid schema, table, or column names."); - } - // Safe dynamic SQL: Schema, table, and column names are validated against predefined whitelists. - // Only trusted values are used in the query string. - // limit and offset are passed as parameters to prevent SQL injection. - String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? OFFSET ?", columnNames, schema, table, whereClause); //NOSONAR - - try { - return jdbcTemplate.queryForList(query, limit, offset); - } catch (Exception e) { - logger.error("Error fetching batch details: {}", e.getMessage(), e); - throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); - } + jdbcTemplate = getJdbcTemplate(); + String query = "SELECT " + columnNames + " FROM " + schema + "." + table + whereClause + " LIMIT ? OFFSET ?"; + System.out.println("Fetching batch for beneficiary details. Query: " + query + ", Limit: " + limit + ", Offset: " + offset); + logger.debug("Fetching batch for beneficiary details. 
Query: {}, Limit: {}, Offset: {}", query, limit, offset); + try { + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch for beneficiary details from table {}: {}", table, e.getMessage(), e); + System.out.println("Error fetching batch for beneficiary details from table " + table + ": " + e.getMessage()); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); } -} + } + + // End of Data Download Repository +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 69be169f..4a97725b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -49,6 +49,17 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB private static final Map> TABLE_GROUPS = new HashMap<>(); static { + // Group 1: Master data or less frequently changing data + TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "m_another_master_table")); + + // Group 2: Transactional data that might involve facility ID + TABLE_GROUPS.put(2, Arrays.asList("t_indent", "t_indentorder", "t_indentissue", "t_stocktransfer", "t_itemstockentry")); + + // Group 3: High volume transactional data + TABLE_GROUPS.put(3, Arrays.asList("i_beneficiarydetails", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_saitemmapping", "t_patientreturn", + "t_facilityconsumption", "t_itemstockexit")); + // Add more groups as needed, up to 9 TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); @@ -67,11 +78,10 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); - } + } public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { logger.info("Starting syncDataToServer. 
Token: {}", token); - ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); @@ -91,7 +101,8 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } + if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -271,12 +282,13 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat while (true) { List> batch; try { - // *** ADD THIS LINE *** logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - problematicWhereClause, + syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), + syncUploadDataDigester.getServerColumns(), + problematicWhereClause, // Use the variable limit, offset); } catch (Exception e) { From 43df9cf6563abc6ed1c5702e9a062e5386480a28 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Sun, 10 Aug 2025 19:05:10 +0530 Subject: [PATCH 17/45] Fixing the datasync from local to central (#110) * fix: datasync from local to central * fix: fix the token --- .../MMUDataSyncVanToServer.java | 6 +- .../dataSyncActivity/DataSyncRepository.java | 33 ++- .../UploadDataToServerImpl.java | 53 +--- .../DataSyncRepositoryCentral.java | 238 ++++++++------- .../GetDataFromVanAndSyncToDBImpl.java | 280 +++++++++++------- .../mmu/utils/JwtUserIdValidationFilter.java | 15 +- .../com/iemr/mmu/utils/RestTemplateUtil.java | 16 +- 7 files changed, 337 insertions(+), 304 deletions(-) diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java index eeb54e9f..524094e5 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncLayerCentral/MMUDataSyncVanToServer.java @@ -63,12 +63,8 @@ public String dataSyncToServer(@RequestBody String requestOBJ, @RequestHeader(value = "Authorization") String Authorization, HttpServletRequest request) { OutputResponse response = new OutputResponse(); - logger.info("test: vanto server auth="+Authorization); try { - String jwtToken = CookieUtil.getJwtTokenFromCookie(request); - logger.info("test: vanto server token="+jwtToken); - - String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization, jwtToken); + String s = getDataFromVanAndSyncToDBImpl.syncDataToServer(requestOBJ, Authorization); if (s != null) response.setResponse(s); else diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index b039a47f..9b020a4e 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -28,7 +28,10 @@ import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.CreatedDate; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; @@ -50,6 +53,8 @@ public class DataSyncRepository { @Autowired private SyncUtilityClassRepo syncutilityClassRepo; + private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + private JdbcTemplate getJdbcTemplate() { return new JdbcTemplate(dataSource); @@ -64,7 +69,7 @@ public List> getDataForGivenSchemaAndTable(String schema, St if (table != null && table.equalsIgnoreCase("m_beneficiaryregidmapping")) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE provisioned is true AND processed <> 'P' AND vanID is not null "; + + " WHERE provisioned is true AND processed != 'P' AND vanID is not null "; } else { if (table != null && (table.equalsIgnoreCase("t_patientissue") || table.equalsIgnoreCase("t_physicalstockentry") || table.equalsIgnoreCase("t_stockadjustment") @@ -75,30 +80,36 @@ public List> getDataForGivenSchemaAndTable(String schema, St || table.equalsIgnoreCase("t_itemstockexit"))) { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed <> 'P' AND SyncFacilityID is not null "; + + " WHERE processed != 'P' AND SyncFacilityID is not null "; } else { baseQuery = " SELECT " + columnNames + " FROM " + schema + "." + table - + " WHERE processed <> 'P' AND vanID is not null "; + + " WHERE processed != 'P' AND vanID is not null "; } } + resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } - - public int updateProcessedFlagInVan(String schemaName, String tableName, StringBuilder vanSerialNos, String autoIncreamentColumn, String user) throws Exception { jdbcTemplate = getJdbcTemplate(); - String query = " UPDATE " + schemaName + "." + tableName - + " SET processed = 'P' , SyncedDate = ?, Syncedby = ? WHERE " + autoIncreamentColumn - + " IN (" + vanSerialNos + ")"; + String query = ""; - Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); - int updatedRows = jdbcTemplate.update(query, syncedDate, user); + if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { + query = "UPDATE " + schemaName + "." + tableName + + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + } else { + query = "UPDATE " + schemaName + "." + tableName + + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? 
" + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + } + Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); + int updatedRows = jdbcTemplate.update(query, syncedDate, syncedDate, user); return updatedRows; } @@ -119,4 +130,4 @@ public int[] updateLatestMasterInLocal(String query, List syncDataList // ---------------------------------- End of Download Repository -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 9476ecc7..2c24dc2c 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -243,32 +243,8 @@ private void setResponseStatus(Map groupIdStatus, int groupId, S groupIdStatus.put("groupId", String.valueOf(groupId)); groupIdStatus.put("status", serverAcknowledgement); responseStatus.add(groupIdStatus); - logger.info("Response from data sync: {}", responseStatus); } -// private boolean setResponseStatus(Map groupIdStatus, int groupId, String serverAcknowledgement, -// List> responseStatus, boolean isProgress) { -// if (serverAcknowledgement != null) { -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", serverAcknowledgement); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } else if (isProgress) { -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", "pending"); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } else { -// isProgress = true; -// groupIdStatus.put("groupId", String.valueOf(groupId)); -// groupIdStatus.put("status", "failed"); -// responseStatus.add(groupIdStatus); -// logger.info("Response from data sync", responseStatus); -// } -// return isProgress; -// -// } - /** * * @param syncTableDetailsIDs @@ -299,7 +275,6 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { - logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { @@ -343,14 +318,11 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, String token) throws Exception { - logger.debug( - "Entering syncDataToServer with vanID: {}, schemaName: '{}', tableName: '{}', vanAutoIncColumnName: '{}', serverColumns: '{}', user: '{}'", - vanID, schemaName, tableName, vanAutoIncColumnName, serverColumns, user); + RestTemplate restTemplate = new RestTemplate(); Integer facilityID = masterVanRepo.getFacilityID(vanID); - logger.debug("Fetched facilityID for vanID {}: {}", vanID, facilityID); // serialize null GsonBuilder gsonBuilder = new GsonBuilder(); @@ -368,33 +340,18 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,token); - logger.info("Before Data sync upload Url" + dataSyncUploadUrl); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, 
Authorization,"datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - logger.info("Received response from data sync URL: {}", response); - logger.info("Received response from data sync URL: {}", dataSyncUploadUrl); - - logger.info("After Data sync upload Url" + dataSyncUploadUrl); - /** - * if data successfully synced then getVanSerialNo of synced data to update - * processed flag - */ + int i = 0; if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); - logger.info( - "Updating processed flag for schemaName: {}, tableName: {}, vanSerialNos: {}, vanAutoIncColumnName: {}, user: {}", - schemaName, tableName, vanSerialNos.toString(), vanAutoIncColumnName, user); - // update table for processed flag = "P" - logger.info(schemaName + "|" + tableName + "|" + vanSerialNos.toString() + "|" + vanAutoIncColumnName - + "|" + user); + i = dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, vanSerialNos, vanAutoIncColumnName, user); - logger.debug("Updated processed flag in database. Records affected: {}", i); } } if (i > 0) @@ -435,4 +392,4 @@ public String getDataSyncGroupDetails() { return null; } -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index bdba82d0..a5d8422d 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -21,19 +21,21 @@ */ package com.iemr.mmu.service.dataSyncLayerCentral; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.sql.DataSource; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Service; -import javax.sql.DataSource; -import java.sql.Timestamp; -import java.sql.Statement; // Import Statement for batchUpdate result interpretation -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; +import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; @Service public class DataSyncRepositoryCentral { @@ -42,7 +44,6 @@ public class DataSyncRepositoryCentral { private JdbcTemplate jdbcTemplate; - // Lazily initialize jdbcTemplate to ensure DataSource is available private JdbcTemplate getJdbcTemplate() { if (this.jdbcTemplate == null) { this.jdbcTemplate = new JdbcTemplate(dataSource); @@ -50,161 +51,176 @@ private JdbcTemplate getJdbcTemplate() { return this.jdbcTemplate; } - private Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + + private static final Set VALID_SCHEMAS = Set.of("public", "db_iemr", "db_identity","apl_db_iemr","apl_db_identity","db_iemr_sync","db_identity_sync"); + + private static final Set VALID_TABLES = Set.of( + "m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", "i_beneficiarycontacts", + 
"i_beneficiarydetails", "i_beneficiaryfamilymapping", "i_beneficiaryidentity", "i_beneficiarymapping", + "t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", "t_pnccare", + "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", "t_physicalactivity", + "t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", "t_sys_cardiovascular", + "t_sys_respiratory", "t_sys_centralnervous", "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem", + "t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", "t_benclinicalobservation", + "t_prescription", "t_prescribeddrug", "t_lab_testorder", "t_benreferdetails", + "t_lab_testresult", "t_physicalstockentry", "t_patientissue", "t_facilityconsumption", "t_itemstockentry", + "t_itemstockexit", "t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory", + "t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", "t_cancerobstetrichistory", + "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", "t_canceroralexamination", + "t_cancerbreastexamination", "t_cancerabdominalexamination", "t_cancergynecologicalexamination", + "t_cancerdiagnosis", "t_cancerimageannotation", "i_beneficiaryimage", "t_stockadjustment", + "t_stocktransfer", "t_patientreturn", "t_indent", "t_indentissue", "t_indentorder", "t_saitemmapping"); + + private boolean isValidDatabaseIdentifierCharacter(String identifier) { + return identifier != null && identifier.matches("^[a-zA-Z_][a-zA-Z0-9_]*$"); + } + + private boolean isValidSchemaName(String schemaName) { + return VALID_SCHEMAS.contains(schemaName.toLowerCase()); + } + + private boolean isValidTableName(String tableName) { + return VALID_TABLES.contains(tableName.toLowerCase()); + } + + private boolean isValidColumnNamesList(String columnNames) { + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + for (String col : columnNames.split(",")) { + if (!isValidDatabaseIdentifierCharacter(col.trim())) { + return false; + } + } + return true; + } - // Data Upload Repository public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, - String vanAutoIncColumnName, int syncFacilityID) { + String vanAutoIncColumnName, int syncFacilityID) { jdbcTemplate = getJdbcTemplate(); - List params = new ArrayList<>(); - StringBuilder queryBuilder = new StringBuilder("SELECT "); - queryBuilder.append(vanAutoIncColumnName); - queryBuilder.append(" FROM "); - queryBuilder.append(schemaName).append(".").append(tableName); + if (!isValidSchemaName(schemaName) || !isValidTableName(tableName) || + !isValidDatabaseIdentifierCharacter(vanAutoIncColumnName)) { + logger.error("Invalid identifiers: schema={}, table={}, column={}", schemaName, tableName, + vanAutoIncColumnName); + throw new IllegalArgumentException("Invalid identifiers provided."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ") + .append(vanAutoIncColumnName).append(" FROM ") + .append(schemaName).append(".").append(tableName).append(" WHERE VanSerialNo = ?"); - StringBuilder whereClause = new StringBuilder(); - whereClause.append(" WHERE "); - 
whereClause.append("VanSerialNo = ?"); params.add(vanSerialNo); - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + if (List.of("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") .contains(tableName.toLowerCase()) && syncFacilityID > 0) { - - whereClause.append(" AND "); - whereClause.append("SyncFacilityID = ?"); + queryBuilder.append(" AND SyncFacilityID = ?"); params.add(syncFacilityID); - } else { - whereClause.append(" AND "); - whereClause.append("VanID = ?"); + queryBuilder.append(" AND VanID = ?"); params.add(vanID); } - queryBuilder.append(whereClause); - String query = queryBuilder.toString(); - Object[] queryParams = params.toArray(); - - logger.debug("Checking record existence query: {} with params: {}", query, Arrays.toString(queryParams)); - System.out.println("Checking record existence query: " + query + " with params: " + Arrays.toString(queryParams)); - try { - List> resultSet = jdbcTemplate.queryForList(query, queryParams); - if (resultSet != null && !resultSet.isEmpty()) { - System.out.println("Record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("Record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 1; - } else { - System.out.println("No record found for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("No record found for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 0; - } - } catch (org.springframework.dao.EmptyResultDataAccessException e) { - System.out.println("No record found (EmptyResultDataAccessException) for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.debug("No record found (EmptyResultDataAccessException) for table {}: VanSerialNo={}, VanID={}", tableName, vanSerialNo, vanID); - return 0; + List> resultSet = jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); + return (resultSet != null && !resultSet.isEmpty()) ? 1 : 0; } catch (Exception e) { - System.out.println("Database error during checkRecordIsAlreadyPresentOrNot for table " + tableName + ": VanSerialNo=" + vanSerialNo + ", VanID=" + vanID); - logger.error("Database error during checkRecordIsAlreadyPresentOrNot for table {}: VanSerialNo={}, VanID={}. Error: {}", tableName, vanSerialNo, vanID, e.getMessage(), e); - throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); // Re-throw or handle as appropriate + logger.error("Error checking record presence: {}", e.getMessage(), e); + throw new RuntimeException("Failed to check record existence: " + e.getMessage(), e); } } - // Method for synchronization of data to central DB public int[] syncDataToCentralDB(String schema, String tableName, String serverColumns, String query, - List syncDataList) { + List syncDataList) { jdbcTemplate = getJdbcTemplate(); - logger.info("Executing batch operation for table: {}. Query type: {}. Number of records: {}", tableName, query.startsWith("INSERT") ? "INSERT" : "UPDATE", syncDataList.size()); - logger.debug("Query: {}", query); -System.out.println("Executing batch operation for table: " + tableName + ". Query type: " + (query.startsWith("INSERT") ? "INSERT" : "UPDATE") + ". 
Number of records: " + syncDataList.size()); try { - // Start batch insert/update - int[] i = jdbcTemplate.batchUpdate(query, syncDataList); - System.out.println("Batch operation completed for table " + tableName + ". Results: " + Arrays.toString(i)); - logger.info("Batch operation completed for table {}. Results: {}", tableName, Arrays.toString(i)); - return i; + + return jdbcTemplate.batchUpdate(query, syncDataList); } catch (Exception e) { - logger.error("Exception during batch update for table {}: {}", tableName, e.getMessage(), e); - System.out.println("Exception during batch update for table " + tableName + ": " + e.getMessage()); - // Log the error with detailed information - // Re-throw the exception to be handled by the service layer, so specific errors can be captured. - throw new RuntimeException("Batch sync failed for table " + tableName + ": " + e.getMessage(), e); + logger.error("Batch sync failed for table {}: {}", tableName, e.getMessage(), e); + throw new RuntimeException("Batch sync failed: " + e.getMessage(), e); } } - // End of Data Upload Repository - public List> getMasterDataFromTable(String schema, String table, String columnNames, - String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) throws Exception { + String masterType, Timestamp lastDownloadDate, Integer vanID, Integer psmID) { jdbcTemplate = getJdbcTemplate(); - List> resultSetList = new ArrayList<>(); - StringBuilder baseQueryBuilder = new StringBuilder(" SELECT ").append(columnNames).append(" FROM ").append(schema).append(".").append(table); List params = new ArrayList<>(); + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + + StringBuilder queryBuilder = new StringBuilder("SELECT ").append(columnNames) + .append(" FROM ").append(schema).append(".").append(table); + if (masterType != null) { if (lastDownloadDate != null) { - baseQueryBuilder.append(" WHERE LastModDate >= ? "); + queryBuilder.append(" WHERE LastModDate >= ?"); params.add(lastDownloadDate); - if (masterType.equalsIgnoreCase("V")) { - baseQueryBuilder.append(" AND VanID = ? "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND VanID = ?"); params.add(vanID); - } else if (masterType.equalsIgnoreCase("P")) { - baseQueryBuilder.append(" AND ProviderServiceMapID = ? "); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append(" AND ProviderServiceMapID = ?"); params.add(psmID); } } else { - if (masterType.equalsIgnoreCase("V")) { - baseQueryBuilder.append(" WHERE VanID = ? "); + queryBuilder.append(" WHERE "); + if ("V".equalsIgnoreCase(masterType)) { + queryBuilder.append("VanID = ?"); params.add(vanID); - } else if (masterType.equalsIgnoreCase("P")) { - baseQueryBuilder.append(" WHERE ProviderServiceMapID = ? 
"); + } else if ("P".equalsIgnoreCase(masterType)) { + queryBuilder.append("ProviderServiceMapID = ?"); params.add(psmID); } } } - String finalQuery = baseQueryBuilder.toString(); - logger.info("Select query central: {}", finalQuery); - logger.info("Last Downloaded Date: {}", lastDownloadDate); - logger.info("Query Params: {}", params); - System.out.println("Select query central: " + finalQuery); - System.out.println("Last Downloaded Date: " + lastDownloadDate); - System.out.println("Query Params: " + params); - try { - if (params.isEmpty()) { - resultSetList = jdbcTemplate.queryForList(finalQuery); - } else { - resultSetList = jdbcTemplate.queryForList(finalQuery, params.toArray()); - } + // Safe dynamic SQL: All dynamic parts (table names, columns, etc.) are + // validated or hardcoded. + // Parameter values are bound safely using prepared statement placeholders (?). + + return jdbcTemplate.queryForList(queryBuilder.toString(), params.toArray()); } catch (Exception e) { - System.out.println("Error fetching master data from table " + table + ": " + e.getMessage()); - logger.error("Error fetching master data from table {}: {}", table, e.getMessage(), e); + logger.error("Error fetching master data: {}", e.getMessage(), e); throw new RuntimeException("Failed to fetch master data: " + e.getMessage(), e); } -System.out.println("Result set Details size: " + resultSetList.size()); - logger.info("Result set Details size: {}", resultSetList.size()); - return resultSetList; } - public List> getBatchForBenDetails(String schema, String table, String columnNames, - String whereClause, int limit, int offset) { + public List> getBatchForBenDetails(SyncUploadDataDigester digester, + String whereClause, int limit, int offset) { jdbcTemplate = getJdbcTemplate(); - String query = "SELECT " + columnNames + " FROM " + schema + "." + table + whereClause + " LIMIT ? OFFSET ?"; - System.out.println("Fetching batch for beneficiary details. Query: " + query + ", Limit: " + limit + ", Offset: " + offset); - logger.debug("Fetching batch for beneficiary details. Query: {}, Limit: {}, Offset: {}", query, limit, offset); - try { - return jdbcTemplate.queryForList(query, limit, offset); - } catch (Exception e) { - logger.error("Error fetching batch for beneficiary details from table {}: {}", table, e.getMessage(), e); - System.out.println("Error fetching batch for beneficiary details from table " + table + ": " + e.getMessage()); - throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); - } - } - // End of Data Download Repository + String schema = digester.getSchemaName(); + String table = digester.getTableName(); + String columnNames = digester.getServerColumns(); + + if (!isValidSchemaName(schema) || !isValidTableName(table) || !isValidColumnNamesList(columnNames)) { + throw new IllegalArgumentException("Invalid schema, table, or column names."); + } + // Safe dynamic SQL: Schema, table, and column names are validated against + // predefined whitelists. + // Only trusted values are used in the query string. + // limit and offset are passed as parameters to prevent SQL injection. + String query = String.format("SELECT %s FROM %s.%s %s LIMIT ? 
OFFSET ?", columnNames, schema, table, + whereClause); // NOSONAR + + try { + + return jdbcTemplate.queryForList(query, limit, offset); + } catch (Exception e) { + logger.error("Error fetching batch details: {}", e.getMessage(), e); + throw new RuntimeException("Failed to fetch batch data: " + e.getMessage(), e); + } + } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 4a97725b..cd33c2b5 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -24,10 +24,10 @@ import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.HashMap; - +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +37,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; - @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { @@ -47,44 +46,57 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; + private static final Map> TABLE_GROUPS = new HashMap<>(); static { - // Group 1: Master data or less frequently changing data - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "m_another_master_table")); - - // Group 2: Transactional data that might involve facility ID - TABLE_GROUPS.put(2, Arrays.asList("t_indent", "t_indentorder", "t_indentissue", "t_stocktransfer", "t_itemstockentry")); + TABLE_GROUPS.put(1, + Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", + "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", + "i_beneficiaryidentity", "i_beneficiarymapping")); + + TABLE_GROUPS.put(2, + Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", + "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", + "t_physicalactivity")); + + TABLE_GROUPS.put(3, + Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", + "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", + "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, + Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", + "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", + "t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", "t_physicalstockentry", "t_patientissue", + "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); + + TABLE_GROUPS.put(7, + Arrays.asList("t_cancerfamilyhistory", 
"t_cancerpersonalhistory", "t_cancerdiethistory", + "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", + "t_canceroralexamination", "t_cancerbreastexamination", "t_cancerabdominalexamination", + "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); - // Group 3: High volume transactional data - TABLE_GROUPS.put(3, Arrays.asList("i_beneficiarydetails", "t_patientissue", "t_physicalstockentry", - "t_stockadjustment", "t_saitemmapping", "t_patientreturn", - "t_facilityconsumption", "t_itemstockexit")); - // Add more groups as needed, up to 9 - TABLE_GROUPS.put(1, Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount","i_beneficiaryaddress","i_beneficiarycontacts","i_beneficiarydetails","i_beneficiaryfamilymapping","i_beneficiaryidentity","i_beneficiarymapping")); - - TABLE_GROUPS.put(2, Arrays.asList("t_benvisitdetail","t_phy_anthropometry","t_phy_vitals","t_benadherence","t_anccare","t_pnccare","t_ncdscreening","t_ncdcare","i_ben_flow_outreach","t_covid19","t_idrsdetails","t_physicalactivity")); - - TABLE_GROUPS.put(3, Arrays.asList("t_phy_generalexam","t_phy_headtotoe","t_sys_obstetric","t_sys_gastrointestinal","t_sys_cardiovascular","t_sys_respiratory","t_sys_centralnervous","t_sys_musculoskeletalsystem","t_sys_genitourinarysystem")); - - TABLE_GROUPS.put(4, Arrays.asList("t_ancdiagnosis","t_ncddiagnosis","t_pncdiagnosis","t_benchefcomplaint","t_benclinicalobservation","t_prescription","t_prescribeddrug","t_lab_testorder","t_benreferdetails")); - - TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult","t_physicalstockentry","t_patientissue","t_facilityconsumption","t_itemstockentry","t_itemstockexit")); + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory","t_femaleobstetrichistory","t_benmenstrualdetails","t_benpersonalhabit","t_childvaccinedetail1","t_childvaccinedetail2","t_childoptionalvaccinedetail","t_ancwomenvaccinedetail","t_childfeedinghistory","t_benallergyhistory","t_bencomorbiditycondition","t_benmedicationhistory","t_benfamilyhistory","t_perinatalhistory","t_developmenthistory")); + TABLE_GROUPS.put(9, + Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentissue", "t_indentorder", "t_saitemmapping")); - TABLE_GROUPS.put(7, Arrays.asList("t_cancerfamilyhistory","t_cancerpersonalhistory","t_cancerdiethistory","t_cancerobstetrichistory","t_cancervitals","t_cancersignandsymptoms","t_cancerlymphnode","t_canceroralexamination","t_cancerbreastexamination","t_cancerabdominalexamination","t_cancergynecologicalexamination","t_cancerdiagnosis","t_cancerimageannotation")); + } - TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - - TABLE_GROUPS.put(9, Arrays.asList("t_itemstockentry","t_itemstockexit","t_patientissue","t_physicalstockentry","t_stockadjustment","t_stocktransfer","t_patientreturn","t_facilityconsumption","t_indent","t_indentissue","t_indentorder","t_saitemmapping")); - - } + public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - public String syncDataToServer(String requestOBJ, String Authorization, String token) throws Exception { - logger.info("Starting syncDataToServer. 
Token: {}", token); ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - +List> dataToBesync = syncUploadDataDigester.getSyncData(); if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; @@ -101,8 +113,7 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } - if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { + } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); if ("data sync passed".equals(result)) { return "Sync successful for i_beneficiarydetails."; @@ -111,28 +122,35 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t return "Sync failed for i_beneficiarydetails."; } } else { - // Determine the group for the current table or iterate through all if no specific table is given + // Determine the group for the current table or iterate through all if no + // specific table is given boolean syncSuccess = true; String errorMessage = ""; - // If a specific table is provided in the request, try to find its group and sync only that table. + // If a specific table is provided in the request, try to find its group and + // sync only that table. // Otherwise, iterate through all defined groups. if (syncTableName != null && !syncTableName.isEmpty()) { boolean foundInGroup = false; - for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { - if (entry.getValue().contains(syncTableName.toLowerCase())) { - logger.info("Attempting to sync table '{}' from Group {}", syncTableName, entry.getKey()); - syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); + + for (Map map : dataToBesync) { + // if (entry.getValue().contains(syncTableName.toLowerCase())) { + if(map.get("tableName") != null + && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, + syncUploadDataDigester); foundInGroup = true; break; } } if (!foundInGroup) { - logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", syncTableName); + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", + syncTableName); syncSuccess = performGenericTableSync(syncUploadDataDigester); } } else { - // If no specific table is in the request (e.g., a general sync trigger), iterate through groups + // If no specific table is in the request (e.g., a general sync trigger), + // iterate through groups logger.info("No specific table provided. Attempting to sync all tables group by group."); for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { Integer groupId = entry.getKey(); @@ -142,26 +160,34 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t try { // Create a new digester for each table within the group, // or adapt if the original digester contains data for multiple tables. - // For simplicity, assuming syncDataDigester needs to be tailored per table or group. 
- // If your requestOBJ contains data for only one table at a time, this loop might need adjustment + // For simplicity, assuming syncDataDigester needs to be tailored per table or + // group. + // If your requestOBJ contains data for only one table at a time, this loop + // might need adjustment // to fetch data for each table in the group. // For now, it will use the syncData from the original requestOBJ, which implies // the original requestOBJ should represent data for a single table. - // A more robust solution would involve fetching data for each table dynamically. - boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); + // A more robust solution would involve fetching data for each table + // dynamically. + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), + table, syncUploadDataDigester); if (!currentTableSyncResult) { syncSuccess = false; errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; - logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); - // Optionally, you can choose to break here or continue to sync other tables in the group/next group + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, + errorMessage); + // Optionally, you can choose to break here or continue to sync other tables in + // the group/next group // For now, let's continue to attempt other tables within the group. } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } } catch (Exception e) { syncSuccess = false; - errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + e.getMessage() + ". "; - logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + + e.getMessage() + ". "; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, + e.getMessage(), e); // Continue to attempt other tables } } @@ -182,35 +208,35 @@ public String syncDataToServer(String requestOBJ, String Authorization, String t * with relevant data for the `currentTableName` before calling this. * In a real-world scenario, you might fetch data for each table here. */ - private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { + private boolean syncTablesInGroup(String schemaName, String currentTableName, + SyncUploadDataDigester originalDigester) { logger.info("Attempting generic sync for table: {}", currentTableName); // This is a simplification. In a production system, you would likely need // to retrieve the actual data for 'currentTableName' from the local DB // based on the group sync approach. For this example, we'll assume the // originalDigester's syncData is relevant or needs to be re-populated. 
- // Create a new digester instance or modify the existing one for the current table + // Create a new digester instance or modify the existing one for the current + // table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is generic or set per table + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is + // generic or set per table // !!! IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. // The `originalDigester.getSyncData()` might not be correct for all tables in a group. // For demonstration, I'm just using the original digester's data, which is likely incorrect - // if you're syncing multiple tables from a single request. - // You'll need a method like: dataSyncRepositoryLocal.getDataForTable(currentTableName, ...) - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); // Placeholder: Replace with actual data fetching - + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); return performGenericTableSync(tableSpecificDigester); } - - private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUploadDataDigester syncUploadDataDigester) { - logger.info("Processing update_M_BeneficiaryRegIdMapping_for_provisioned_benID for table: {}", syncUploadDataDigester.getTableName()); + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( + SyncUploadDataDigester syncUploadDataDigester) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -226,20 +252,24 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID(SyncUpload objArr[3] = String.valueOf(map.get("VanID")); syncData.add(objArr); } else { - logger.warn("Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", map); + logger.warn( + "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", + map); } } if (!syncData.isEmpty()) { try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), SERVER_COLUMNS_NOT_REQUIRED, query, syncData); + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); return "data sync passed"; } else { - logger.error("Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", syncData.size(), i.length, getFailedRecords(i, syncData)); + logger.error( + "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", + syncData.size(), i.length, getFailedRecords(i, syncData)); return "Partial data sync for m_beneficiaryregidmapping."; } } catch (Exception e) { @@ -263,48 +293,47 @@ private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String t queryBuilder.append(" BeneficiaryID = ? 
"); queryBuilder.append(" AND "); queryBuilder.append(" VanID = ? "); + return queryBuilder.toString(); } public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { - logger.info("Processing update_I_BeneficiaryDetails_for_processed_in_batches for table: {}", syncUploadDataDigester.getTableName()); - List syncData = new ArrayList<>(); + List syncData = new ArrayList<>(); - String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); - int limit = 1000; - int offset = 0; - int totalProcessed = 0; + int limit = 1000; + int offset = 0; + int totalProcessed = 0; - String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly while (true) { List> batch; try { - logger.info("DEBUG: Passing whereClause to getBatchForBenDetails: [{}]", problematicWhereClause); - - batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), - syncUploadDataDigester.getServerColumns(), - problematicWhereClause, // Use the variable - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); - } - - if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a "success" - logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); - return "data sync passed"; - } else { - logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); - return "No data processed or sync failed for i_beneficiarydetails."; + + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + } + + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a + // "success" + logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; + } } } - } + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { StringBuilder queryBuilder = new StringBuilder(" UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); @@ -317,12 +346,11 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN return queryBuilder.toString(); } - /** - * Handles the generic synchronization logic for tables not covered by specific handlers. + * Handles the generic synchronization logic for tables not covered by specific + * handlers. 
*/ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { - logger.info("Performing generic sync for table: {}", syncUploadDataDigester.getTableName()); List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncDataListInsert = new ArrayList<>(); List syncDataListUpdate = new ArrayList<>(); @@ -342,10 +370,16 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanID = String.valueOf(map.get("VanID")); int syncFacilityID = 0; - // Update SyncedBy and SyncedDate in the map itself before processing + // Update SyncedBy and SyncedDate in the xmap itself before processing map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB - + if (map.get("CreatedDate") == null || map.get("created_date") == null) { + logger.info("CreatedDate was null for table: " + syncTableName + ", inserting current time"); + if(map.get("CreatedDate") == null) + map.put("CreatedDate", String.valueOf(LocalDateTime.now())); + if(map.get("created_date") == null) + map.put("created_date", String.valueOf(LocalDateTime.now())); + } // Facility ID processing if (facilityIDFromDigester != null) { // Determine the 'Processed' status based on facility ID for specific tables @@ -370,7 +404,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig break; } case "t_stocktransfer": { - if (map.containsKey("TransferToFacilityID") && map.get("TransferToFacilityID") instanceof Double) { + if (map.containsKey("TransferToFacilityID") + && map.get("TransferToFacilityID") instanceof Double) { Double transferToFacilityID = (Double) map.get("TransferToFacilityID"); if (transferToFacilityID.intValue() == facilityIDFromDigester) { map.put("Processed", "P"); @@ -388,7 +423,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig break; } default: - // No specific facility ID logic for other tables, maintain existing 'Processed' status or default + // No specific facility ID logic for other tables, maintain existing 'Processed' + // status or default break; } } @@ -400,13 +436,13 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue(); } - int recordCheck; try { recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); return false; // Critical error, stop sync for this table } @@ -417,7 +453,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig for (String column : serverColumnsList) { Object value = map.get(column.trim()); - // Handle boolean conversion if necessary, though String.valueOf should generally work for prepared statements + // Handle boolean conversion if necessary, though String.valueOf should + // generally work for prepared statements if (value instanceof Boolean) { currentRecordValues.add(value); } else if (value != null) { @@ -453,11 +490,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListInsert.isEmpty()) { String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); + try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); if (i.length != syncDataListInsert.size()) { insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", syncTableName, syncDataListInsert.size(), i.length, getFailedRecords(i, syncDataListInsert)); + logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + syncTableName, syncDataListInsert.size(), i.length, + getFailedRecords(i, syncDataListInsert)); } else { logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); } @@ -469,11 +510,15 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListUpdate.isEmpty()) { String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); + // Ensure the update query is correct and matches the expected format try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); if (j.length != syncDataListUpdate.size()) { updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", syncTableName, syncDataListUpdate.size(), j.length, getFailedRecords(j, syncDataListUpdate)); + logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", + syncTableName, syncDataListUpdate.size(), j.length, + getFailedRecords(j, syncDataListUpdate)); } else { logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); } @@ -518,16 +563,24 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol StringBuilder preparedStatementSetter = new StringBuilder(); - if (columnsArr != null && columnsArr.length > 0) { + if (columnsArr != null && columnsArr.length > 0) { for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append(columnsArr[i].trim()); - preparedStatementSetter.append(" = ?"); + String columnName = columnsArr[i].trim(); // ← NEW LINE + + // Special handling for CreatedDate - use COALESCE to prevent NULL + if (columnName.equalsIgnoreCase("CreatedDate")) { // ← NEW BLOCK + preparedStatementSetter.append(columnName); + preparedStatementSetter.append(" = COALESCE(?, CURRENT_TIMESTAMP)"); + } else { + preparedStatementSetter.append(columnName); + preparedStatementSetter.append(" = ?"); + } + if (i < columnsArr.length - 1) { preparedStatementSetter.append(", "); } } } - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); queryBuilder.append(" SET "); @@ -549,18 +602,21 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol private String getFailedRecords(int[] results, List data) { List failedRecordsInfo = new ArrayList<>(); for (int k = 0; k < results.length; k++) { - // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or Statement.SUCCESS_NO_INFO + // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or + // Statement.SUCCESS_NO_INFO // usually indicates a failure or success without specific row count. // A common return value for success is 1 (for one row updated/inserted). if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure // Attempt to get some identifiable info from the failed record if (data.get(k).length > 0) { - failedRecordsInfo.add("Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + failedRecordsInfo.add( + "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); } else { failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); } } } + logger.info("Failed records info: {}", failedRecordsInfo); return String.join("; ", failedRecordsInfo); } } \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java b/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java index 1dba0e78..fbeab7ad 100644 --- a/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java +++ b/src/main/java/com/iemr/mmu/utils/JwtUserIdValidationFilter.java @@ -38,13 +38,10 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo String origin = request.getHeader("Origin"); - logger.debug("Incoming Origin: {}", origin); - logger.debug("Allowed Origins Configured: {}", allowedOrigins); - if (origin != null && isOriginAllowed(origin)) { response.setHeader("Access-Control-Allow-Origin", origin); response.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"); - response.setHeader("Access-Control-Allow-Headers", "Authorization, Content-Type, Accept, Jwttoken"); + response.setHeader("Access-Control-Allow-Headers", "Authorization, Content-Type, Accept, Jwttoken,serverAuthorization, ServerAuthorization, serverauthorization, Serverauthorization"); response.setHeader("Vary", "Origin"); response.setHeader("Access-Control-Allow-Credentials", "true"); } else { @@ -59,7 +56,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo String path = request.getRequestURI(); String contextPath = request.getContextPath(); - logger.info("JwtUserIdValidationFilter invoked for path: " + path); // Log cookies for debugging Cookie[] cookies = request.getCookies(); @@ -76,7 +72,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo // Log headers for debugging String jwtTokenFromHeader = request.getHeader("Jwttoken"); - logger.info("JWT token from header: "); // Skip login and public endpoints if (path.equals(contextPath + "/user/userAuthenticate") @@ -104,7 +99,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo return; } } else if (jwtFromHeader != null) { - logger.info("Validating JWT token from header"); if (jwtAuthenticationUtil.validateUserIdAndJwtToken(jwtFromHeader)) { AuthorizationHeaderRequestWrapper authorizationHeaderRequestWrapper = new AuthorizationHeaderRequestWrapper( request, ""); @@ -113,7 +107,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo } } else { String userAgent = request.getHeader("User-Agent"); - logger.info("User-Agent: " + userAgent); if (userAgent != null && isMobileClient(userAgent) && authHeader != null) { try { UserAgentContext.setUserAgent(userAgent); @@ -123,6 +116,7 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo } return; } + } logger.warn("No valid authentication token found"); @@ -156,7 +150,8 @@ private boolean isMobileClient(String userAgent) { if (userAgent == null) return false; userAgent = userAgent.toLowerCase(); - return userAgent.contains("okhttp"); // iOS (custom clients) + logger.info("User-Agent: " + userAgent); + return 
userAgent.contains("okhttp") || userAgent.contains("java/"); // iOS (custom clients) } private String getJwtTokenFromCookies(HttpServletRequest request) { @@ -179,4 +174,4 @@ private void clearUserIdCookie(HttpServletResponse response) { cookie.setMaxAge(0); // Invalidate the cookie response.addCookie(cookie); } -} +} \ No newline at end of file diff --git a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java index d1ea0efd..33556d22 100644 --- a/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java +++ b/src/main/java/com/iemr/mmu/utils/RestTemplateUtil.java @@ -39,11 +39,16 @@ public static HttpEntity createRequestEntity(Object body, String authori MultiValueMap headers = new LinkedMultiValueMap<>(); headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE + ";charset=utf-8"); - - if (authorization != null && !authorization.isEmpty()) { +logger.info("token: {}", jwtToken); + if (authorization != null && !authorization.isEmpty() && !jwtToken.equalsIgnoreCase("datasync")) { headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + authorization); } + if(authorization != null && !authorization.isEmpty() && jwtToken.equalsIgnoreCase("datasync")) + { + headers.add(HttpHeaders.AUTHORIZATION, authorization); + } + if (jwtToken == null || jwtToken.isEmpty()) { ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); @@ -55,14 +60,11 @@ public static HttpEntity createRequestEntity(Object body, String authori logger.error("Error while getting JWT token from cookie: {}", e.getMessage()); } } - } + } - - if (jwtToken != null && !jwtToken.isEmpty()) { + if (jwtToken != null && !jwtToken.isEmpty() && !jwtToken.equalsIgnoreCase("datasync")) { headers.add(HttpHeaders.COOKIE, "Jwttoken=" + jwtToken); } - - return new HttpEntity<>(body, headers); } } \ No newline at end of file From 2828fe6b68035e4f0b7c1a9937312cc622cc91f5 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Mon, 11 Aug 2025 09:54:47 +0530 Subject: [PATCH 18/45] fix: remove the token for server authorization (#111) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index 252c9ce7..afb42a4b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -186,7 +186,7 @@ private int downloadDataFromServer(SyncDownloadMaster syncDownloadMaster,String // initializing RestTemplate RestTemplate restTemplate = new RestTemplate(); // Provide the required second argument, e.g., an empty string or appropriate authorization token - HttpEntity request = RestTemplateUtil.createRequestEntity(syncDownloadMaster, ServerAuthorization, jwtToken); + HttpEntity request = RestTemplateUtil.createRequestEntity(syncDownloadMaster, ServerAuthorization, "datasync"); // Call rest-template to call API to download master data for given table ResponseEntity response = restTemplate.exchange(dataSyncDownloadUrl, HttpMethod.POST, request, String.class); From 604bd641da7f80d28bab6a9b7b167aab36ed3753 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Thu, 14 Aug 2025 10:42:43 +0530 Subject: [PATCH 19/45] 
Fix the datasync Demographics Issue (#112) * fix: remove condition for i_beneficiarydetails * fix: add logs * fix: add logs * fix: remove db_iemr * fix: add log for show column names too * fix: add date-format condition * fix: change valid column name * fix: change valid column name * fix: change valid column name * fix: change valid column name * fix: update insert query * fix: update cleaned column list * fix: date conversion * fix: conversion date-time * fix: add date conversion * fix: logs added * fix: new logger * fix: revert the date condition * fix: revert insert code * fix: revert insert code * fix: date format issue * fix: logs add * fix: log for group and group lsit * fix: clean the code --------- Co-authored-by: vishwab1 --- .../location/DistrictBlockMasterRepo.java | 2 +- .../dataSyncActivity/DataSyncRepository.java | 21 +- .../UploadDataToServerImpl.java | 17 +- .../DataSyncRepositoryCentral.java | 28 +- .../GetDataFromVanAndSyncToDBImpl.java | 454 ++++++++---------- 5 files changed, 242 insertions(+), 280 deletions(-) diff --git a/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java b/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java index db6a9d40..97452c65 100644 --- a/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java +++ b/src/main/java/com/iemr/mmu/repo/location/DistrictBlockMasterRepo.java @@ -36,7 +36,7 @@ public interface DistrictBlockMasterRepo extends CrudRepository getDistrictBlockMaster(@Param("districtID") Integer districtID); - @Query(value = " SELECT distinct StateID, StateName,WorkingDistrictID,WorkingDistrictName,blockid,blockname,villageid,villagename FROM db_iemr.v_userservicerolemapping WHERE UserID = :userId and UserServciceRoleDeleted is false",nativeQuery = true) + @Query(value = " SELECT distinct StateID, StateName,WorkingDistrictID,WorkingDistrictName,blockid,blockname,villageid,villagename FROM v_userservicerolemapping WHERE UserID = :userId and UserServciceRoleDeleted is false",nativeQuery = true) public List getUserservicerolemapping(@Param("userId") Integer userId); } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index 9b020a4e..f7a55d4a 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -88,7 +88,12 @@ public List> getDataForGivenSchemaAndTable(String schema, St } } - + + logger.info("Select Query started:"); + logger.info("Table Name: {}", table); + + logger.info("Select Query: {}", baseQuery); + resultSetList = jdbcTemplate.queryForList(baseQuery); return resultSetList; } @@ -98,14 +103,16 @@ public int updateProcessedFlagInVan(String schemaName, String tableName, StringB jdbcTemplate = getJdbcTemplate(); String query = ""; + logger.info("Updating processed flag in table: " + tableName + " for vanSerialNos: " + vanSerialNos); + if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { - query = "UPDATE " + schemaName + "." + tableName - + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + query = "UPDATE " + schemaName + "." + tableName + + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; } else { - query = "UPDATE " + schemaName + "." 
+ tableName - + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + query = "UPDATE " + schemaName + "." + tableName + + " SET CreatedDate = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; } Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 2c24dc2c..51d8a32b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -115,7 +115,7 @@ public String getDataToSyncToServer(int vanID, String user, String Authorization public String syncIntercepter(int vanID, String user, String Authorization, String token) throws Exception { // sync activity trigger - + String serverAcknowledgement = startDataSync(vanID, user, Authorization, token); return serverAcknowledgement; @@ -226,7 +226,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin Map response = new HashMap<>(); response.put("response", "Data sync failed"); response.put("groupsProgress", responseStatus); - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); return objectMapper.writerWithDefaultPrettyPrinter() .writeValueAsString(Collections.singletonMap("data", response)); } else { @@ -275,6 +275,7 @@ public List getVanAndServerColumnList(Integer groupID) throws private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { + logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { @@ -316,11 +317,11 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, String token) + String serverColumns, List> dataToBesync, String user, String Authorization, + String token) throws Exception { - + RestTemplate restTemplate = new RestTemplate(); - Integer facilityID = masterVanRepo.getFacilityID(vanID); @@ -340,10 +341,10 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,"datasync"); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - + int i = 0; if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); @@ -392,4 +393,4 @@ public String getDataSyncGroupDetails() { return null; } -} \ No newline at end of file +} diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java index a5d8422d..6dba4611 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java +++ 
b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/DataSyncRepositoryCentral.java @@ -88,19 +88,33 @@ private boolean isValidTableName(String tableName) { } private boolean isValidColumnNamesList(String columnNames) { - if (columnNames == null || columnNames.trim().isEmpty()) { - return false; - } - for (String col : columnNames.split(",")) { - if (!isValidDatabaseIdentifierCharacter(col.trim())) { - return false; + if (columnNames == null || columnNames.trim().isEmpty()) { + return false; + } + logger.info("Validating column names: {}", columnNames); + for (String col : columnNames.split(",")) { + String trimmed = col.trim(); + + // Handle date_format(...) style + if (trimmed.toLowerCase().startsWith("date_format(")) { + int openParenIndex = trimmed.indexOf("("); + int commaIndex = trimmed.indexOf(",", openParenIndex); + if (commaIndex > 0) { + trimmed = trimmed.substring(openParenIndex + 1, commaIndex).trim(); } } - return true; + + if (!isValidDatabaseIdentifierCharacter(trimmed)) { + return false; + } } + return true; +} + public int checkRecordIsAlreadyPresentOrNot(String schemaName, String tableName, String vanSerialNo, String vanID, String vanAutoIncColumnName, int syncFacilityID) { + jdbcTemplate = getJdbcTemplate(); List params = new ArrayList<>(); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index cd33c2b5..fcc68fd3 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -27,7 +27,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,7 +45,6 @@ public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB @Autowired private DataSyncRepositoryCentral dataSyncRepositoryCentral; - private static final Map> TABLE_GROUPS = new HashMap<>(); static { TABLE_GROUPS.put(1, @@ -96,14 +94,15 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E ObjectMapper mapper = new ObjectMapper(); SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); -List> dataToBesync = syncUploadDataDigester.getSyncData(); + List> dataToBesync = syncUploadDataDigester.getSyncData(); + logger.info("Data to be synced: {}", dataToBesync); if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; } String syncTableName = syncUploadDataDigester.getTableName(); - + logger.info("Syncing data for table: {}", syncTableName); // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); @@ -113,29 +112,15 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); return "Sync failed for m_beneficiaryregidmapping."; } - } else if ("i_beneficiarydetails".equalsIgnoreCase(syncTableName)) { - String result = update_I_BeneficiaryDetails_for_processed_in_batches(syncUploadDataDigester); - if ("data sync 
passed".equals(result)) { - return "Sync successful for i_beneficiarydetails."; - } else { - logger.error("Sync failed for i_beneficiarydetails: {}", result); - return "Sync failed for i_beneficiarydetails."; - } - } else { - // Determine the group for the current table or iterate through all if no - // specific table is given + } + else { boolean syncSuccess = true; String errorMessage = ""; - - // If a specific table is provided in the request, try to find its group and - // sync only that table. - // Otherwise, iterate through all defined groups. if (syncTableName != null && !syncTableName.isEmpty()) { boolean foundInGroup = false; - - for (Map map : dataToBesync) { - // if (entry.getValue().contains(syncTableName.toLowerCase())) { - if(map.get("tableName") != null + + for (Map map : dataToBesync) { + if (map.get("tableName") != null && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, syncUploadDataDigester); @@ -149,26 +134,13 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E syncSuccess = performGenericTableSync(syncUploadDataDigester); } } else { - // If no specific table is in the request (e.g., a general sync trigger), - // iterate through groups - logger.info("No specific table provided. Attempting to sync all tables group by group."); + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { Integer groupId = entry.getKey(); List tablesInGroup = entry.getValue(); - logger.info("Starting sync for Group {}", groupId); for (String table : tablesInGroup) { try { - // Create a new digester for each table within the group, - // or adapt if the original digester contains data for multiple tables. - // For simplicity, assuming syncDataDigester needs to be tailored per table or - // group. - // If your requestOBJ contains data for only one table at a time, this loop - // might need adjustment - // to fetch data for each table in the group. - // For now, it will use the syncData from the original requestOBJ, which implies - // the original requestOBJ should represent data for a single table. - // A more robust solution would involve fetching data for each table - // dynamically. + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), table, syncUploadDataDigester); if (!currentTableSyncResult) { @@ -176,9 +148,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, errorMessage); - // Optionally, you can choose to break here or continue to sync other tables in - // the group/next group - // For now, let's continue to attempt other tables within the group. + } else { logger.info("Successfully synced table: {} in Group {}", table, groupId); } @@ -188,7 +158,6 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E + e.getMessage() + ". "; logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, e.getMessage(), e); - // Continue to attempt other tables } } } @@ -202,41 +171,22 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } } - /** - * Helper method to sync tables belonging to a specific group. - * This method assumes that the `syncUploadDataDigester` will be populated - * with relevant data for the `currentTableName` before calling this. 
- * In a real-world scenario, you might fetch data for each table here. - */ private boolean syncTablesInGroup(String schemaName, String currentTableName, SyncUploadDataDigester originalDigester) { - logger.info("Attempting generic sync for table: {}", currentTableName); - // This is a simplification. In a production system, you would likely need - // to retrieve the actual data for 'currentTableName' from the local DB - // based on the group sync approach. For this example, we'll assume the - // originalDigester's syncData is relevant or needs to be re-populated. - - // Create a new digester instance or modify the existing one for the current - // table SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); tableSpecificDigester.setSchemaName(schemaName); tableSpecificDigester.setTableName(currentTableName); tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); // Assuming serverColumns is - // generic or set per table - - // !!! IMPORTANT: You'll need to fetch the data for 'currentTableName' from your local DB here. - // The `originalDigester.getSyncData()` might not be correct for all tables in a group. - // For demonstration, I'm just using the original digester's data, which is likely incorrect + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); tableSpecificDigester.setSyncData(originalDigester.getSyncData()); return performGenericTableSync(tableSpecificDigester); } private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( SyncUploadDataDigester syncUploadDataDigester) { - + List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncData = new ArrayList<>(); @@ -261,7 +211,8 @@ private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( if (!syncData.isEmpty()) { try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, syncData); + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, + syncData); if (i.length == syncData.size()) { logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); @@ -309,10 +260,10 @@ public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDat String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly - while (true) { - List> batch; - try { - + while (true) { + List> batch; + try { + batch = dataSyncRepositoryCentral.getBatchForBenDetails( syncUploadDataDigester, problematicWhereClause, @@ -350,210 +301,215 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN * Handles the generic synchronization logic for tables not covered by specific * handlers. 
*/ - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; // Nothing to sync, consider it a success - } + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; // Nothing to sync, consider it a success + } - for (Map map : dataToBesync) { - String vanSerialNo = String.valueOf(map.get(vanAutoIncColumnName)); - String vanID = String.valueOf(map.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the xmap itself before processing - map.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - map.put("SyncedDate", String.valueOf(LocalDateTime.now())); // Ensure column name matches DB - if (map.get("CreatedDate") == null || map.get("created_date") == null) { - logger.info("CreatedDate was null for table: " + syncTableName + ", inserting current time"); - if(map.get("CreatedDate") == null) - map.put("CreatedDate", String.valueOf(LocalDateTime.now())); - if(map.get("created_date") == null) - map.put("created_date", String.valueOf(LocalDateTime.now())); + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + // Fallback if format is unexpected + cleanKey = key.substring(start, key.indexOf(")")).trim(); } - // Facility ID processing - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (map.containsKey("FromFacilityID") && map.get("FromFacilityID") instanceof Double) { - Double fromFacilityID = (Double) map.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + } + cleanRecord.put(cleanKey.trim(), map.get(key)); + } + + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); 
+ String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the xmap itself before processing + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_indentissue": { - if (map.containsKey("ToFacilityID") && map.get("ToFacilityID") instanceof Double) { - Double toFacilityID = (Double) map.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_stocktransfer": { - if (map.containsKey("TransferToFacilityID") - && map.get("TransferToFacilityID") instanceof Double) { - Double transferToFacilityID = (Double) map.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_itemstockentry": { - if (map.containsKey("FacilityID") && map.get("FacilityID") instanceof Double) { - Double mapFacilityID = (Double) map.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - map.put("Processed", "P"); - } + break; + } + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if (mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - default: - // No specific facility ID logic for other tables, maintain existing 'Processed' - // status or default - break; + break; } + default: + // No specific facility ID logic for other tables + break; } + } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Integer) { - syncFacilityID = (Integer) map.get("SyncFacilityID"); - } else if (map.containsKey("SyncFacilityID") && map.get("SyncFacilityID") instanceof Double) { - syncFacilityID = ((Double) map.get("SyncFacilityID")).intValue(); - } + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - int recordCheck; - try { - recordCheck = 
dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - return false; // Critical error, stop sync for this table - } + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); + return false; // Critical error, stop sync for this table + } - // Prepare Object array for insert/update - Object[] objArr; - List serverColumnsList = Arrays.asList(syncUploadDataDigester.getServerColumns().split(",")); - List currentRecordValues = new ArrayList<>(); - - for (String column : serverColumnsList) { - Object value = map.get(column.trim()); - // Handle boolean conversion if necessary, though String.valueOf should - // generally work for prepared statements - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); - } else { - currentRecordValues.add(null); - } + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); } + } - objArr = currentRecordValues.toArray(); - - if (recordCheck == 0) { - syncDataListInsert.add(objArr); + Object[] objArr = currentRecordValues.toArray(); + if (recordCheck == 0) { + syncDataListInsert.add(objArr); + } else { + // For update, append the WHERE clause parameters at the end of the array + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); } else { - // For update, append the WHERE clause parameters at the end of the array - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && map.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(map.get("SyncFacilityID"))); - } else { - updateParams.add(String.valueOf(vanID)); - } - syncDataListUpdate.add(updateParams.toArray()); + updateParams.add(String.valueOf(vanID)); } + syncDataListUpdate.add(updateParams.toArray()); } + } - boolean insertSuccess = true; - boolean updateSuccess = true; + boolean insertSuccess = true; 
+ boolean updateSuccess = true; - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, syncUploadDataDigester.getServerColumns()); - - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - syncUploadDataDigester.getServerColumns(), queryInsert, syncDataListInsert); - if (i.length != syncDataListInsert.size()) { - insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", - syncTableName, syncDataListInsert.size(), i.length, - getFailedRecords(i, syncDataListInsert)); - } else { - logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); - } - } catch (Exception e) { + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryInsert, syncDataListInsert); + if (i.length != syncDataListInsert.size()) { insertSuccess = false; - logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + syncTableName, syncDataListInsert.size(), i.length, + getFailedRecords(i, syncDataListInsert)); + } else { + logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); } + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); } + } - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, syncUploadDataDigester.getServerColumns(), syncTableName); - // Ensure the update query is correct and matches the expected format - try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - if (j.length != syncDataListUpdate.size()) { - updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", - syncTableName, syncDataListUpdate.size(), j.length, - getFailedRecords(j, syncDataListUpdate)); - } else { - logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); - } - } catch (Exception e) { + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + try { + int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + if (j.length != syncDataListUpdate.size()) { updateSuccess = false; - logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", + syncTableName, syncDataListUpdate.size(), j.length, + getFailedRecords(j, syncDataListUpdate)); + } else { + logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); } + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); } - return insertSuccess && updateSuccess; + } + return insertSuccess && updateSuccess; +} + private String getQueryToInsertDataToServerDB(String schemaName, String + tableName, String serverColumns) { + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + + StringBuilder preparedStatementSetter = new StringBuilder(); + + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } } - private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } - } - - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append("("); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(")"); - return queryBuilder.toString(); + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(")"); + return queryBuilder.toString(); } public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { @@ -563,24 +519,6 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol StringBuilder preparedStatementSetter = new StringBuilder(); - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - String columnName = columnsArr[i].trim(); // ← NEW LINE - - // Special handling for CreatedDate - use COALESCE to prevent NULL - if (columnName.equalsIgnoreCase("CreatedDate")) { // ← NEW BLOCK - preparedStatementSetter.append(columnName); - preparedStatementSetter.append(" = COALESCE(?, CURRENT_TIMESTAMP)"); - } else { - preparedStatementSetter.append(columnName); - preparedStatementSetter.append(" = ?"); - } - - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } - } StringBuilder queryBuilder = new StringBuilder(" UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); queryBuilder.append(" SET "); @@ -619,4 +557,6 @@ private String getFailedRecords(int[] results, List data) { logger.info("Failed records info: {}", failedRecordsInfo); return String.join("; ", failedRecordsInfo); } + + } \ No newline at end of file From 62a5a18b9d1f2a73c7d5ee47e08a3a54e3cdd0b9 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Tue, 19 
Aug 2025 22:19:09 +0530 Subject: [PATCH 20/45] Fix the token issue for Ben-gen id generation (#114) * fix: update server authorization for bengen * fix: update server authorization for bengen --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index afb42a4b..dd389c38 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -333,7 +333,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri int i = 0; // Rest template RestTemplate restTemplate = new RestTemplate(); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization,token); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, ServerAuthorization,"datasync"); // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); @@ -342,7 +342,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, token); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), ServerAuthorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, From d592f6a5884f9f412414b3f535b18b9d004b6599 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 20 Aug 2025 10:59:03 +0530 Subject: [PATCH 21/45] fix: replace authorization for local api call (#116) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index dd389c38..13070e18 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -342,7 +342,7 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), ServerAuthorization, "datasync"); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, From 
26bf74c1fb1455a7f36aeae89a6bb1bdb61ffd62 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Wed, 20 Aug 2025 15:33:47 +0530 Subject: [PATCH 22/45] fix: add logs (#117) --- .../service/dataSyncActivity/DownloadDataFromServerImpl.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index 13070e18..dbaadbad 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -337,11 +337,13 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); - +logger.info("Respponse from central API: " + response); +logger.info("Import url="+benImportUrlLocal); if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); i = 1; // Call rest-template to call central API to generate UNIQUE ID at central From 9f7fb1cc69c89bf63b41dc3fbc426f303a370a61 Mon Sep 17 00:00:00 2001 From: Vanitha S <116701245+vanitha1822@users.noreply.github.com> Date: Fri, 22 Aug 2025 11:14:32 +0530 Subject: [PATCH 23/45] Fix the BenGen ID Issue (#118) * fix: add logs to check the identity-api * fix: add logs * fix: add logs --- .../dataSyncActivity/StartSyncActivity.java | 5 +++-- .../DownloadDataFromServerImpl.java | 21 ++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java b/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java index 5f08fa9d..1e43ed66 100644 --- a/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java +++ b/src/main/java/com/iemr/mmu/controller/dataSyncActivity/StartSyncActivity.java @@ -25,7 +25,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; - import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; @@ -37,8 +36,8 @@ import com.iemr.mmu.service.dataSyncActivity.DownloadDataFromServerImpl; import com.iemr.mmu.service.dataSyncActivity.DownloadDataFromServerTransactionalImpl; import com.iemr.mmu.service.dataSyncActivity.UploadDataToServerImpl; -import com.iemr.mmu.utils.response.OutputResponse; import com.iemr.mmu.utils.CookieUtil; +import com.iemr.mmu.utils.response.OutputResponse; import io.swagger.v3.oas.annotations.Operation; import jakarta.servlet.http.HttpServletRequest; @@ -181,6 +180,8 @@ public String callCentralAPIToGenerateBenIDAndimportToLocal(@RequestBody String OutputResponse response = new OutputResponse(); try { String jwtToken = CookieUtil.getJwtTokenFromCookie(request); + logger.info("Authorization from controller="+ authorization); + int i = 
downloadDataFromServerImpl.callCentralAPIToGenerateBenIDAndimportToLocal(requestOBJ, authorization, serverAuthorization, jwtToken); if (i == 0) { diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java index dbaadbad..6e7b1c91 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DownloadDataFromServerImpl.java @@ -330,25 +330,29 @@ public Map getDownloadStatus() { public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, String Authorization, String ServerAuthorization, String token) throws Exception { - int i = 0; + int i = 0, i1 = 0; + try{ // Rest template RestTemplate restTemplate = new RestTemplate(); HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, ServerAuthorization,"datasync"); // Call rest-template to call central API to generate UNIQUE ID at central ResponseEntity response = restTemplate.exchange(benGenUrlCentral, HttpMethod.POST, request, String.class); -logger.info("Respponse from central API: " + response); -logger.info("Import url="+benImportUrlLocal); + logger.info("Authorization before calling local api="+Authorization); + logger.info("Import url="+benImportUrlLocal); if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); if (obj != null && obj.has("data") && obj.has("statusCode") && obj.getInt("statusCode") == 200) { // Consume the response from API and call local identity api to save data - HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, "datasync"); + logger.info("Authorization: " + Authorization); + logger.info("ServerAuthorization: " + ServerAuthorization); + HttpEntity request1 = RestTemplateUtil.createRequestEntity(obj.get("data").toString(), Authorization, token); i = 1; - // Call rest-template to call central API to generate UNIQUE ID at central + logger.info("Request to benImporturllocal: " + request1); ResponseEntity response1 = restTemplate.exchange(benImportUrlLocal, HttpMethod.POST, request1, String.class); + logger.info("Response from benImportUrlLocal: " + response1); if (response1 != null && response1.hasBody()) { JSONObject obj1 = new JSONObject(response1.getBody()); if (obj1 != null && obj1.has("data") && obj1.has("statusCode") @@ -357,9 +361,12 @@ public int callCentralAPIToGenerateBenIDAndimportToLocal(String requestOBJ, Stri } } - } + } } - + } catch (Exception e) { + logger.error("Error while generating catch UNIQUE_ID at central server: " + e.getMessage()); + throw new Exception("Error while generating catch UNIQUE_ID at central server: " + e.getMessage()); + } return i; } } From 676336796f09251b91bfa28ec3ff4459230f7f52 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 12:44:26 +0530 Subject: [PATCH 24/45] fix: add logs --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index fcc68fd3..64121ba3 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -533,6 +533,7 @@ public String 
getQueryToUpdateDataToServerDB(String schemaName, String serverCol } else { queryBuilder.append(" AND VanID = ? "); } + logger.info("Test Query Builder: {}", queryBuilder.toString()); return queryBuilder.toString(); } From b56ff4686ad0855698329fd0aac17821a055238a Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 13:01:25 +0530 Subject: [PATCH 25/45] fix: add logs for checking --- .../GetDataFromVanAndSyncToDBImpl.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 64121ba3..8d805020 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -447,10 +447,12 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListInsert.isEmpty()) { String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); - +logger.info("Query Insert="+queryInsert); try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + logger.info("Insert result array length: {}", i.length); + logger.info("Expected insert size: {}", syncDataListInsert.size()); if (i.length != syncDataListInsert.size()) { insertSuccess = false; logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", @@ -461,15 +463,19 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } catch (Exception e) { insertSuccess = false; + logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); } } if (!syncDataListUpdate.isEmpty()) { String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + logger.info("Query Update="+queryUpdate); try { int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + logger.info("Update result array length: {}", j.length); + logger.info("Expected update size: {}", syncDataListUpdate.size()); if (j.length != syncDataListUpdate.size()) { updateSuccess = false; logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", @@ -480,6 +486,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } catch (Exception e) { updateSuccess = false; + logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); } } @@ -509,6 +516,7 @@ private String getQueryToInsertDataToServerDB(String schemaName, String queryBuilder.append(") VALUES ("); queryBuilder.append(preparedStatementSetter); queryBuilder.append(")"); + logger.info("Test Query Builder: {}", queryBuilder.toString()); return queryBuilder.toString(); } @@ -539,6 +547,7 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol // Helper to get information about failed records (for logging purposes) private String getFailedRecords(int[] results, List data) { + logger.info("Inside get Failed Records"); List failedRecordsInfo = new ArrayList<>(); for (int k = 0; k < results.length; k++) { // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or From ed82ff1117a45eb4d770291eaef76e612c39cdf8 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 13:22:43 +0530 Subject: [PATCH 26/45] fix: update the prepare statement --- .../GetDataFromVanAndSyncToDBImpl.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 8d805020..253a7d2c 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -527,7 +527,17 @@ public String getQueryToUpdateDataToServerDB(String schemaName, String serverCol StringBuilder preparedStatementSetter = new StringBuilder(); - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + preparedStatementSetter.append(column).append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } + } + + StringBuilder queryBuilder = new StringBuilder("UPDATE "); queryBuilder.append(schemaName).append(".").append(tableName); queryBuilder.append(" SET "); queryBuilder.append(preparedStatementSetter); From 3aafc86f52d9bc8f279193ae48aefbd8493f99ba Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 13:26:25 +0530 Subject: [PATCH 27/45] fix: add log --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 253a7d2c..05983a2b 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -403,6 +403,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig try { recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + logger.info("Test Record check="+recordCheck); } catch (Exception e) { 
logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); @@ -423,6 +424,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } Object[] objArr = currentRecordValues.toArray(); + logger.info("Test Obj Arr-",objArr); if (recordCheck == 0) { syncDataListInsert.add(objArr); } else { From 20db96255b972c9ef919b0c534040ccc961b40ef Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 13:28:20 +0530 Subject: [PATCH 28/45] fix: add log --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 05983a2b..22b272fc 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -404,6 +404,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); logger.info("Test Record check="+recordCheck); + logger.info("Test all the data="+ schemaName ":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); } catch (Exception e) { logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); From 53207f55e5158d7d37414a93bf34cdca3ad68718 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 13:34:50 +0530 Subject: [PATCH 29/45] fix: add logs --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 22b272fc..0cbf9f09 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -404,7 +404,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); logger.info("Test Record check="+recordCheck); - logger.info("Test all the data="+ schemaName ":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); + logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); } catch (Exception e) { logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); @@ -425,7 +425,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } Object[] objArr = currentRecordValues.toArray(); - logger.info("Test Obj Arr-",objArr); + logger.info("Test Obj Arr {}",objArr); if (recordCheck == 0) { syncDataListInsert.add(objArr); } else { From 4c883bf706a1c03f0b78fc0e61549745f2ad040b Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 15:52:12 +0530 Subject: [PATCH 30/45] fix: add Sync Result --- .../CancerGynecologicalExamination.java | 1 - .../service/dataSyncActivity/SyncResult.java | 46 +++++++ .../UploadDataToServerImpl.java | 4 +- .../GetDataFromVanAndSyncToDBImpl.java | 114 +++++++++++++----- 4 files changed, 130 insertions(+), 35 deletions(-) create mode 100644 src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java diff --git a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java index 4e96ba30..dc916f29 100644 --- a/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java +++ b/src/main/java/com/iemr/mmu/data/doctor/CancerGynecologicalExamination.java @@ -39,7 +39,6 @@ import com.google.gson.annotations.Expose; import lombok.Data; -import lombok.Data; @Entity @Data diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java new file mode 100644 index 00000000..b51a536d --- /dev/null +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/SyncResult.java @@ -0,0 +1,46 @@ +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. 
+*/ +package com.iemr.mmu.service.dataSyncActivity; + +import lombok.Data; + +@Data +public class SyncResult { + private String schemaName; + private String tableName; + private String vanSerialNo; + private String syncedBy; + private boolean success; + private String reason; // Failure reason if any + + // Constructor + public SyncResult(String schemaName, String tableName, String vanSerialNo, String syncedBy, boolean success, String reason) { + this.schemaName = schemaName; + this.tableName = tableName; + this.vanSerialNo = vanSerialNo; + this.syncedBy = syncedBy; + this.success = success; + this.reason = reason; + } + + // Getters & setters omitted for brevity +} diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 51d8a32b..93ce0505 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -341,10 +341,12 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); + logger.info("Request obj="+requestOBJ); HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - +logger.info("Response from the server="+response); +logger.info("Response body="+response.getBody()); int i = 0; if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 0cbf9f09..36ecb7ea 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -35,6 +35,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; +import com.iemr.mmu.service.dataSyncActivity.SyncResult; @Service public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { @@ -168,6 +169,13 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } else { return "Overall data sync failed.
Details: " + errorMessage; } +// Map responseMap = new HashMap<>(); +// responseMap.put("statusCode", 200); +// responseMap.put("message", "Data sync completed"); +// responseMap.put("records", syncResults); +// logger.info("Response = "+responseMap); +// return new ObjectMapper().writeValueAsString(responseMap); + } } @@ -306,6 +314,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncDataListInsert = new ArrayList<>(); List syncDataListUpdate = new ArrayList<>(); + List syncResults = new ArrayList<>(); if (dataToBesync == null || dataToBesync.isEmpty()) { logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); @@ -318,6 +327,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); String serverColumns = syncUploadDataDigester.getServerColumns(); + int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + List serverColumnsList = Arrays.asList(serverColumns.split(",")); for (Map map : dataToBesync) { @@ -428,6 +439,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig logger.info("Test Obj Arr {}",objArr); if (recordCheck == 0) { syncDataListInsert.add(objArr); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); } else { // For update, append the WHERE clause parameters at the end of the array List updateParams = new ArrayList<>(Arrays.asList(objArr)); @@ -442,6 +454,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig updateParams.add(String.valueOf(vanID)); } syncDataListUpdate.add(updateParams.toArray()); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); + } } @@ -451,48 +465,82 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig if (!syncDataListInsert.isEmpty()) { String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); logger.info("Query Insert="+queryInsert); - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - serverColumns, queryInsert, syncDataListInsert); - logger.info("Insert result array length: {}", i.length); - logger.info("Expected insert size: {}", syncDataListInsert.size()); - if (i.length != syncDataListInsert.size()) { - insertSuccess = false; - logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", - syncTableName, syncDataListInsert.size(), i.length, - getFailedRecords(i, syncDataListInsert)); - } else { - logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); - } - } catch (Exception e) { - insertSuccess = false; - logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); - logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); +try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + for (int k = 0; k < i.length; k++) { + if (i[k] < 1) { + syncResults.set(k, new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position + syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); } } +} catch (Exception e) { + for (int k = 0; k < syncDataListInsert.size(); k++) { + syncResults.add(new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + } +} + + // try { + // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + // serverColumns, queryInsert, syncDataListInsert); + // logger.info("Insert result array length: {}", i.length); + // logger.info("Expected insert size: {}", syncDataListInsert.size()); + // if (i.length != syncDataListInsert.size()) { + // insertSuccess = false; + // logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + // syncTableName, syncDataListInsert.size(), i.length, + // getFailedRecords(i, syncDataListInsert)); + // } else { + // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); + // } + // } catch (Exception e) { + // insertSuccess = false; + // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); + // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + // } + } if (!syncDataListUpdate.isEmpty()) { String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); logger.info("Query Update="+queryUpdate); try { - int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - logger.info("Update result array length: {}", j.length); - logger.info("Expected update size: {}", syncDataListUpdate.size()); - if (j.length != syncDataListUpdate.size()) { - updateSuccess = false; - logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", - syncTableName, syncDataListUpdate.size(), j.length, - getFailedRecords(j, syncDataListUpdate)); - } else { - logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); - } - } catch (Exception e) { - updateSuccess = false; - logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); - logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); + for (int k = 0; k < i.length; k++) { + if (i[k] < 1) { + syncResults.set(k, new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position + syncUploadDataDigester.getSyncedBy(), false, "Update failed")); } } +} catch (Exception e) { + for (int k = 0; k < syncDataListUpdate.size(); k++) { + syncResults.add(new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + } +} + + // try { + // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + // logger.info("Update result array length: {}", j.length); + // logger.info("Expected update size: {}", syncDataListUpdate.size()); + // if (j.length != syncDataListUpdate.size()) { + // updateSuccess = false; + // logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", + // syncTableName, syncDataListUpdate.size(), j.length, + // getFailedRecords(j, syncDataListUpdate)); + // } else { + // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); + // } + // } catch (Exception e) { + // updateSuccess = false; + // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); + // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + // } + } return insertSuccess && updateSuccess; } private String getQueryToInsertDataToServerDB(String schemaName, String From 8de6c104ad5f25b8bf03561bc06e27f6a2e00912 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 15:54:51 +0530 Subject: [PATCH 31/45] fix: add logs --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 36ecb7ea..89a1351f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -507,6 +507,8 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig logger.info("Query Update="+queryUpdate); try { int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); + logger.info("Test Update result array length: {}", i.length); + logger.info("Test Expected update size: {}", syncDataListUpdate.size()); for (int k = 0; k < i.length; k++) { if (i[k] < 1) { syncResults.set(k, new SyncResult(schemaName, syncTableName, From b303fdfafd3bb4ca0fe115b9a44f802ec9d0dcc4 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 16:25:05 +0530 
Subject: [PATCH 32/45] fix: add syncResults --- .../GetDataFromVanAndSyncToDBImpl.java | 1097 +++++++++-------- 1 file changed, 550 insertions(+), 547 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 89a1351f..74031692 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -1,635 +1,638 @@ -/* -* AMRIT – Accessible Medical Records via Integrated Technology -* Integrated EHR (Electronic Health Records) Solution -* -* Copyright (C) "Piramal Swasthya Management and Research Institute" -* -* This file is part of AMRIT. -* -* This program is free software: you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation, either version 3 of the License, or -* (at your option) any later version. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -* You should have received a copy of the GNU General Public License -* along with this program. If not, see https://www.gnu.org/licenses/. -*/ -package com.iemr.mmu.service.dataSyncLayerCentral; - -import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; -import com.iemr.mmu.service.dataSyncActivity.SyncResult; - -@Service -public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { - - private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity - private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); - - @Autowired - private DataSyncRepositoryCentral dataSyncRepositoryCentral; - - private static final Map> TABLE_GROUPS = new HashMap<>(); - static { - TABLE_GROUPS.put(1, - Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", - "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", - "i_beneficiaryidentity", "i_beneficiarymapping")); - - TABLE_GROUPS.put(2, - Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", - "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", - "t_physicalactivity")); - - TABLE_GROUPS.put(3, - Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", - "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", - "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); - - TABLE_GROUPS.put(4, - Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", - "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", - "t_benreferdetails")); - - TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", 
"t_physicalstockentry", "t_patientissue", - "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); - - TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", - "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", - "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", - "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); - - TABLE_GROUPS.put(7, - Arrays.asList("t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", - "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", - "t_canceroralexamination", "t_cancerbreastexamination", "t_cancerabdominalexamination", - "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); - - TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - - TABLE_GROUPS.put(9, - Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", - "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentissue", "t_indentorder", "t_saitemmapping")); + /* + * AMRIT – Accessible Medical Records via Integrated Technology + * Integrated EHR (Electronic Health Records) Solution + * + * Copyright (C) "Piramal Swasthya Management and Research Institute" + * + * This file is part of AMRIT. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see https://www.gnu.org/licenses/. 
+ */ + package com.iemr.mmu.service.dataSyncLayerCentral; + + import java.time.LocalDateTime; + import java.util.ArrayList; + import java.util.Arrays; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.beans.factory.annotation.Autowired; + import org.springframework.stereotype.Service; + + import com.fasterxml.jackson.databind.ObjectMapper; + import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; + import com.iemr.mmu.service.dataSyncActivity.SyncResult; + + @Service + public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { + + private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity + private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); + + @Autowired + private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + private static final Map> TABLE_GROUPS = new HashMap<>(); + static { + TABLE_GROUPS.put(1, + Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", + "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", + "i_beneficiaryidentity", "i_beneficiarymapping")); + + TABLE_GROUPS.put(2, + Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", + "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", + "t_physicalactivity")); + + TABLE_GROUPS.put(3, + Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", + "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", + "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, + Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", + "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", + "t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", "t_physicalstockentry", "t_patientissue", + "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); + + TABLE_GROUPS.put(7, + Arrays.asList("t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", + "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", + "t_canceroralexamination", "t_cancerbreastexamination", "t_cancerabdominalexamination", + "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); + + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); + + TABLE_GROUPS.put(9, + Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentissue", "t_indentorder", "t_saitemmapping")); - } + } - public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { + public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - ObjectMapper 
mapper = new ObjectMapper(); - SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - List> dataToBesync = syncUploadDataDigester.getSyncData(); - logger.info("Data to be synced: {}", dataToBesync); - if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { - logger.error("Invalid SyncUploadDataDigester object or tableName is null."); - return "Error: Invalid sync request."; - } + ObjectMapper mapper = new ObjectMapper(); + SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncResults = new ArrayList<>(); // <-- define here - String syncTableName = syncUploadDataDigester.getTableName(); - logger.info("Syncing data for table: {}", syncTableName); - // Handle specific tables first, if their logic is distinct - if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { - String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester); - if ("data sync passed".equals(result)) { - return "Sync successful for m_beneficiaryregidmapping."; - } else { - logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); - return "Sync failed for m_beneficiaryregidmapping."; + logger.info("Data to be synced: {}", dataToBesync); + if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { + logger.error("Invalid SyncUploadDataDigester object or tableName is null."); + return "Error: Invalid sync request."; } - } - else { - boolean syncSuccess = true; - String errorMessage = ""; - if (syncTableName != null && !syncTableName.isEmpty()) { - boolean foundInGroup = false; - - for (Map map : dataToBesync) { - if (map.get("tableName") != null - && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { - syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, - syncUploadDataDigester); - foundInGroup = true; - break; - } - } - if (!foundInGroup) { - logger.warn("Table '{}' not found in any predefined groups. 
Proceeding with generic sync logic.", - syncTableName); - syncSuccess = performGenericTableSync(syncUploadDataDigester); - } - } else { - for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { - Integer groupId = entry.getKey(); - List tablesInGroup = entry.getValue(); - for (String table : tablesInGroup) { - try { + String syncTableName = syncUploadDataDigester.getTableName(); + logger.info("Syncing data for table: {}", syncTableName); + // Handle specific tables first, if their logic is distinct + if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { + String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester, syncResults); + if ("data sync passed".equals(result)) { + return "Sync successful for m_beneficiaryregidmapping."; + } else { + logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); + return "Sync failed for m_beneficiaryregidmapping."; + } + } + else { + boolean syncSuccess = true; + String errorMessage = ""; + if (syncTableName != null && !syncTableName.isEmpty()) { + boolean foundInGroup = false; + + for (Map map : dataToBesync) { + if (map.get("tableName") != null + && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, + syncUploadDataDigester, syncResults); + foundInGroup = true; + break; + } + } + if (!foundInGroup) { + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", + syncTableName); + syncSuccess = performGenericTableSync(syncUploadDataDigester, syncResults); + } + } else { - boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), - table, syncUploadDataDigester); - if (!currentTableSyncResult) { + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + Integer groupId = entry.getKey(); + List tablesInGroup = entry.getValue(); + for (String table : tablesInGroup) { + try { + + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), + table, syncUploadDataDigester, syncResults); + if (!currentTableSyncResult) { + syncSuccess = false; + errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, + errorMessage); + + } else { + logger.info("Successfully synced table: {} in Group {}", table, groupId); + } + } catch (Exception e) { syncSuccess = false; - errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; - logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, - errorMessage); - - } else { - logger.info("Successfully synced table: {} in Group {}", table, groupId); + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + + e.getMessage() + ". "; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, + e.getMessage(), e); } - } catch (Exception e) { - syncSuccess = false; - errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " - + e.getMessage() + ". "; - logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, - e.getMessage(), e); } } } - } - if (syncSuccess) { - return "Overall data sync passed."; - } else { - return "Overall data sync failed. Details: " + errorMessage; + // if (syncSuccess) { + // return "Overall data sync passed."; + // } else { + // return "Overall data sync failed. 
Details: " + errorMessage; + // } + Map responseMap = new HashMap<>(); + responseMap.put("statusCode", 200); + responseMap.put("message", "Data sync completed"); + responseMap.put("records", syncResults); + logger.info("Response = "+responseMap); + return new ObjectMapper().writeValueAsString(responseMap); + } -// Map responseMap = new HashMap<>(); -// responseMap.put("statusCode", 200); -// responseMap.put("message", "Data sync completed"); -// responseMap.put("records", syncResults); -// logger.info("Response = "+responseMap); -// return new ObjectMapper().writeValueAsString(responseMap); + } + private boolean syncTablesInGroup(String schemaName, String currentTableName, + SyncUploadDataDigester originalDigester, List syncResults) { + SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); + tableSpecificDigester.setSchemaName(schemaName); + tableSpecificDigester.setTableName(currentTableName); + tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); + tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); + tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); + return performGenericTableSync(tableSpecificDigester, syncResults); } - } - private boolean syncTablesInGroup(String schemaName, String currentTableName, - SyncUploadDataDigester originalDigester) { - SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); - tableSpecificDigester.setSchemaName(schemaName); - tableSpecificDigester.setTableName(currentTableName); - tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); - tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); - tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); - return performGenericTableSync(tableSpecificDigester); - } + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( + SyncUploadDataDigester syncUploadDataDigester, List syncResults) { - private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( - SyncUploadDataDigester syncUploadDataDigester) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncData = new ArrayList<>(); - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncData = new ArrayList<>(); + String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); - String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); - - for (Map map : dataToBesync) { - if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { - Object[] objArr = new Object[4]; - objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy - objArr[1] = String.valueOf(map.get("BenRegId")); - objArr[2] = String.valueOf(map.get("BeneficiaryID")); - objArr[3] = String.valueOf(map.get("VanID")); - syncData.add(objArr); - } else { - logger.warn( - "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", - map); + for (Map map : dataToBesync) { + if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") 
!= null) { + Object[] objArr = new Object[4]; + objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy + objArr[1] = String.valueOf(map.get("BenRegId")); + objArr[2] = String.valueOf(map.get("BeneficiaryID")); + objArr[3] = String.valueOf(map.get("VanID")); + syncData.add(objArr); + } else { + logger.warn( + "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", + map); + } } - } - - if (!syncData.isEmpty()) { - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, - syncData); - if (i.length == syncData.size()) { - logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); - return "data sync passed"; - } else { - logger.error( - "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", - syncData.size(), i.length, getFailedRecords(i, syncData)); - return "Partial data sync for m_beneficiaryregidmapping."; + if (!syncData.isEmpty()) { + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, + syncData); + + if (i.length == syncData.size()) { + logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); + return "data sync passed"; + } else { + logger.error( + "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", + syncData.size(), i.length, getFailedRecords(i, syncData)); + return "Partial data sync for m_beneficiaryregidmapping."; + } + } catch (Exception e) { + logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); + return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); } - } catch (Exception e) { - logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); - return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); + } else { + logger.info("No data to sync for m_beneficiaryregidmapping."); + return "data sync passed"; } - } else { - logger.info("No data to sync for m_beneficiaryregidmapping."); - return "data sync passed"; } - } - private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); - queryBuilder.append(" WHERE "); - queryBuilder.append(" BenRegId = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" BeneficiaryID = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? "); + private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); + queryBuilder.append(" WHERE "); + queryBuilder.append(" BenRegId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" BeneficiaryID = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" VanID = ? 
"); + + return queryBuilder.toString(); + } - return queryBuilder.toString(); - } + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { + List syncData = new ArrayList<>(); - public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { - List syncData = new ArrayList<>(); + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); - String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); + int limit = 1000; + int offset = 0; + int totalProcessed = 0; - int limit = 1000; - int offset = 0; - int totalProcessed = 0; + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly - String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + while (true) { + List> batch; + try { - while (true) { - List> batch; - try { + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + } - batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - problematicWhereClause, - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a + // "success" + logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; + } } + } - if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a - // "success" - logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); - return "data sync passed"; - } else { - logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); - return "No data processed or sync failed for i_beneficiarydetails."; - } + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); + queryBuilder.append(" WHERE "); + queryBuilder.append("BeneficiaryDetailsId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append("VanID = ? "); + return queryBuilder.toString(); } - } - private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); - queryBuilder.append(" WHERE "); - queryBuilder.append("BeneficiaryDetailsId = ? 
"); - queryBuilder.append(" AND "); - queryBuilder.append("VanID = ? "); - return queryBuilder.toString(); - } + /** + * Handles the generic synchronization logic for tables not covered by specific + * handlers. + */ - /** - * Handles the generic synchronization logic for tables not covered by specific - * handlers. - */ + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); + // List syncResults = new ArrayList<>(); - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - List syncResults = new ArrayList<>(); + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; // Nothing to sync, consider it a success + } - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; // Nothing to sync, consider it a success - } + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); - String serverColumns = syncUploadDataDigester.getServerColumns(); - - int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); - - List serverColumnsList = Arrays.asList(serverColumns.split(",")); - - for (Map map : dataToBesync) { - // Create a new map with clean column names as keys - Map cleanRecord = new HashMap<>(); - for (String key : map.keySet()) { - String cleanKey = key; - // Handle keys with SQL functions like date_format - if (key.startsWith("date_format(") && key.endsWith(")")) { - int start = key.indexOf("(") + 1; - int end = key.indexOf(","); - if (end > start) { - cleanKey = key.substring(start, end).trim(); - } else { - // Fallback if format is unexpected - cleanKey = key.substring(start, key.indexOf(")")).trim(); + int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + // Fallback if format is unexpected + cleanKey = key.substring(start, key.indexOf(")")).trim(); + } } + cleanRecord.put(cleanKey.trim(), map.get(key)); } - cleanRecord.put(cleanKey.trim(), map.get(key)); - 
} - String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); - String vanID = String.valueOf(cleanRecord.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the xmap itself before processing - cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { - Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); + String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the xmap itself before processing + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_indentissue": { - if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { - Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_stocktransfer": { - if (cleanRecord.containsKey("TransferToFacilityID") - && cleanRecord.get("TransferToFacilityID") instanceof Number) { - Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_itemstockentry": { - if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { - Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if 
(mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; + default: + // No specific facility ID logic for other tables + break; } - default: - // No specific facility ID logic for other tables - break; } - } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { - syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); - } - - int recordCheck; - try { - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - logger.info("Test Record check="+recordCheck); - logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - return false; // Critical error, stop sync for this table - } + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - // Prepare Object array for insert/update - List currentRecordValues = new ArrayList<>(); - for (String column : serverColumnsList) { - Object value = cleanRecord.get(column.trim()); - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); - } else { - currentRecordValues.add(null); + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + logger.info("Test Record check="+recordCheck); + logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); + return false; // Critical error, stop sync for this table } - } - Object[] objArr = currentRecordValues.toArray(); - logger.info("Test Obj Arr {}",objArr); - if (recordCheck == 0) { - syncDataListInsert.add(objArr); - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); - } else { - // For update, append the WHERE clause parameters at the end of the array - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); + } + } - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + Object[] objArr = currentRecordValues.toArray(); + logger.info("Test Obj Arr {}",objArr); + if (recordCheck == 0) { + syncDataListInsert.add(objArr); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); } else { - updateParams.add(String.valueOf(vanID)); + // For update, append the WHERE clause parameters at the end of the array + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); + } + syncDataListUpdate.add(updateParams.toArray()); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); + } - syncDataListUpdate.add(updateParams.toArray()); - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); + } + boolean insertSuccess = true; + boolean updateSuccess = true; + + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + logger.info("Query Insert="+queryInsert); + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); + for (int k = 0; k < i.length; k++) { + if (i[k] < 1) { + syncResults.set(k, new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position + syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); + } + } + } catch (Exception e) { + for (int k = 0; k < syncDataListInsert.size(); k++) { + syncResults.add(new SyncResult(schemaName, 
syncTableName, + String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); } } - boolean insertSuccess = true; - boolean updateSuccess = true; - - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); -logger.info("Query Insert="+queryInsert); -try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); - for (int k = 0; k < i.length; k++) { - if (i[k] < 1) { - syncResults.set(k, new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position - syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); + // try { + // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + // serverColumns, queryInsert, syncDataListInsert); + // logger.info("Insert result array length: {}", i.length); + // logger.info("Expected insert size: {}", syncDataListInsert.size()); + // if (i.length != syncDataListInsert.size()) { + // insertSuccess = false; + // logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", + // syncTableName, syncDataListInsert.size(), i.length, + // getFailedRecords(i, syncDataListInsert)); + // } else { + // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); + // } + // } catch (Exception e) { + // insertSuccess = false; + // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); + // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + // } } - } -} catch (Exception e) { - for (int k = 0; k < syncDataListInsert.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); - } -} - - // try { - // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - // serverColumns, queryInsert, syncDataListInsert); - // logger.info("Insert result array length: {}", i.length); - // logger.info("Expected insert size: {}", syncDataListInsert.size()); - // if (i.length != syncDataListInsert.size()) { - // insertSuccess = false; - // logger.error("Partial insert for table {}. Expected {} inserts, got {}. 
Failed records: {}", - // syncTableName, syncDataListInsert.size(), i.length, - // getFailedRecords(i, syncDataListInsert)); - // } else { - // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); - // } - // } catch (Exception e) { - // insertSuccess = false; - // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); - // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); - // } - } - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); - logger.info("Query Update="+queryUpdate); - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); - logger.info("Test Update result array length: {}", i.length); - logger.info("Test Expected update size: {}", syncDataListUpdate.size()); - for (int k = 0; k < i.length; k++) { - if (i[k] < 1) { - syncResults.set(k, new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position - syncUploadDataDigester.getSyncedBy(), false, "Update failed")); + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + logger.info("Query Update="+queryUpdate); + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); + logger.info("Test Update result array length: {}", i.length); + logger.info("Test Expected update size: {}", syncDataListUpdate.size()); + for (int k = 0; k < i.length; k++) { + if (i[k] < 1) { + syncResults.set(k, new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position + syncUploadDataDigester.getSyncedBy(), false, "Update failed")); + } + } + } catch (Exception e) { + for (int k = 0; k < syncDataListUpdate.size(); k++) { + syncResults.add(new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); } - } -} catch (Exception e) { - for (int k = 0; k < syncDataListUpdate.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); - } -} - - // try { - // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - // logger.info("Update result array length: {}", j.length); - // logger.info("Expected update size: {}", syncDataListUpdate.size()); - // if (j.length != syncDataListUpdate.size()) { - // updateSuccess = false; - // logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", - // syncTableName, syncDataListUpdate.size(), j.length, - // getFailedRecords(j, syncDataListUpdate)); - // } else { - // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); - // } - // } catch (Exception e) { - // updateSuccess = false; - // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); - // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); - // } - } - return insertSuccess && updateSuccess; -} - private String getQueryToInsertDataToServerDB(String schemaName, String - tableName, String serverColumns) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); - - StringBuilder preparedStatementSetter = new StringBuilder(); - - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } } - StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append("("); - queryBuilder.append(serverColumns); - queryBuilder.append(") VALUES ("); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(")"); - logger.info("Test Query Builder: {}", queryBuilder.toString()); - return queryBuilder.toString(); + // try { + // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + // logger.info("Update result array length: {}", j.length); + // logger.info("Expected update size: {}", syncDataListUpdate.size()); + // if (j.length != syncDataListUpdate.size()) { + // updateSuccess = false; + // logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", + // syncTableName, syncDataListUpdate.size(), j.length, + // getFailedRecords(j, syncDataListUpdate)); + // } else { + // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); + // } + // } catch (Exception e) { + // updateSuccess = false; + // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); + // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + // } + } + logger.info("Sync results for table {}: {}", syncTableName, syncResults); + return insertSuccess && updateSuccess; } - - public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { + private String getQueryToInsertDataToServerDB(String schemaName, String + tableName, String serverColumns) { String[] columnsArr = null; if (serverColumns != null) - columnsArr = serverColumns.split(","); + columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - String column = columnsArr[i].trim(); - preparedStatementSetter.append(column).append(" = ?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } } - StringBuilder queryBuilder = new StringBuilder("UPDATE "); + StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); + queryBuilder.append("("); + queryBuilder.append(serverColumns); + queryBuilder.append(") VALUES ("); queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE VanSerialNo = ? "); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(tableName.toLowerCase())) { - queryBuilder.append(" AND SyncFacilityID = ? "); - } else { - queryBuilder.append(" AND VanID = ? "); - } + queryBuilder.append(")"); logger.info("Test Query Builder: {}", queryBuilder.toString()); return queryBuilder.toString(); - } + } - // Helper to get information about failed records (for logging purposes) - private String getFailedRecords(int[] results, List data) { - logger.info("Inside get Failed Records"); - List failedRecordsInfo = new ArrayList<>(); - for (int k = 0; k < results.length; k++) { - // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or - // Statement.SUCCESS_NO_INFO - // usually indicates a failure or success without specific row count. - // A common return value for success is 1 (for one row updated/inserted). - if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure - // Attempt to get some identifiable info from the failed record - if (data.get(k).length > 0) { - failedRecordsInfo.add( - "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); - } else { - failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); + public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); + + StringBuilder preparedStatementSetter = new StringBuilder(); + + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + preparedStatementSetter.append(column).append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } } } + + StringBuilder queryBuilder = new StringBuilder("UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(" WHERE VanSerialNo = ? "); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase())) { + queryBuilder.append(" AND SyncFacilityID = ? "); + } else { + queryBuilder.append(" AND VanID = ? "); + } + logger.info("Test Query Builder: {}", queryBuilder.toString()); + return queryBuilder.toString(); + } + + // Helper to get information about failed records (for logging purposes) + private String getFailedRecords(int[] results, List data) { + logger.info("Inside get Failed Records"); + List failedRecordsInfo = new ArrayList<>(); + for (int k = 0; k < results.length; k++) { + // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or + // Statement.SUCCESS_NO_INFO + // usually indicates a failure or success without specific row count. + // A common return value for success is 1 (for one row updated/inserted). + if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure + // Attempt to get some identifiable info from the failed record + if (data.get(k).length > 0) { + failedRecordsInfo.add( + "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + } else { + failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); + } + } + } + logger.info("Failed records info: {}", failedRecordsInfo); + return String.join("; ", failedRecordsInfo); } - logger.info("Failed records info: {}", failedRecordsInfo); - return String.join("; ", failedRecordsInfo); - } - -} \ No newline at end of file + + } \ No newline at end of file From 69347fe32b20f826dd35af811f6ead09a565d387 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 16:25:28 +0530 Subject: [PATCH 33/45] fix: add syncResults --- .../dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 74031692..ee5d7812 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -176,6 +176,7 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E responseMap.put("message", "Data sync completed"); responseMap.put("records", syncResults); logger.info("Response = "+responseMap); + logger.info("Sync Results = "+syncResults); return new ObjectMapper().writeValueAsString(responseMap); } From 80d49f17c7799c4a552c4cdbf80e637d1448365b Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 19:58:46 +0530 Subject: [PATCH 34/45] fix: update processed flag --- .../dataSyncActivity/DataSyncRepository.java | 14 ++-- .../UploadDataToServerImpl.java | 75 +++++++++++++++---- .../GetDataFromVanAndSyncToDBImpl.java | 33 +++++++- 3 files changed, 99 insertions(+), 23 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index f7a55d4a..c48ea001 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -98,8 +98,8 @@ public List> getDataForGivenSchemaAndTable(String schema, St return resultSetList; } - public int updateProcessedFlagInVan(String schemaName, String tableName, StringBuilder vanSerialNos, - String autoIncreamentColumn, String user) throws Exception { + public int updateProcessedFlagInVan(String schemaName, String tableName, List vanSerialNos, + String autoIncreamentColumn, String user, String status) throws Exception { jdbcTemplate = getJdbcTemplate(); String query = ""; @@ -107,16 +107,16 @@ public int updateProcessedFlagInVan(String schemaName, String tableName, StringB if (tableName != null && tableName.toLowerCase().equals("i_ben_flow_outreach")) { query = "UPDATE " + schemaName + "." + tableName - + " SET created_date = ? , processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + + " SET created_date = ? , processed = ?, SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + String.join(",", vanSerialNos) + ")"; } else { query = "UPDATE " + schemaName + "." + tableName - + " SET CreatedDate = ? 
, processed = 'P', SyncedDate = ?, Syncedby = ? " - + "WHERE " + autoIncreamentColumn + " IN (" + vanSerialNos + ")"; + + " SET CreatedDate = ? , processed = ?, SyncedDate = ?, Syncedby = ? " + + "WHERE " + autoIncreamentColumn + " IN (" + String.join(",", vanSerialNos) + ")"; } Timestamp syncedDate = new Timestamp(System.currentTimeMillis()); - int updatedRows = jdbcTemplate.update(query, syncedDate, syncedDate, user); + int updatedRows = jdbcTemplate.update(query, syncedDate, status, syncedDate, user); return updatedRows; } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 93ce0505..f230a9c0 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; +import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -347,21 +348,67 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S String.class); logger.info("Response for thes erver="+response); logger.info("Response body="+response.getBody()); - int i = 0; - if (response != null && response.hasBody()) { - JSONObject obj = new JSONObject(response.getBody()); - if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { - StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); + int successCount = 0; + int failCount = 0; + List successVanSerialNos = new ArrayList<>(); + List failedVanSerialNos = new ArrayList<>(); + + if (response != null && response.hasBody()) { + JSONObject obj = new JSONObject(response.getBody()); + if (obj.has("data")) { + JSONObject dataObj = obj.getJSONObject("data"); + if (dataObj.has("records")) { + JSONArray recordsArr = dataObj.getJSONArray("records"); + for (int i = 0; i < recordsArr.length(); i++) { + JSONObject record = recordsArr.getJSONObject(i); + String vanSerialNo = record.getString("vanSerialNo"); + boolean success = record.getBoolean("success"); + if (success) { + successVanSerialNos.add(vanSerialNo); + successCount++; + } else { + failedVanSerialNos.add(vanSerialNo); + failCount++; + } + } + } + } + } + +logger.info("Success Van Serial No="+successVanSerialNos.toString()); +logger.info("Failed Van Serial No="+failedVanSerialNos.toString()); + // Update processed flag for success and failed vanSerialNos + if (!successVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, + vanAutoIncColumnName, user, "P"); + } + if (!failedVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, + vanAutoIncColumnName, user, "F"); + } + + if (successCount > 0 && failCount == 0) + return "Data successfully synced"; + else if (successCount > 0 && failCount > 0) + return "Partial success: " + successCount + " records synced, " + failCount + " failed"; + else + return "Sync failed"; + } + // int i = 0; + // if (response != null && response.hasBody()) { + // JSONObject obj = new JSONObject(response.getBody()); + // if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { + // StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); - i = dataSyncRepository.updateProcessedFlagInVan(schemaName, 
tableName, vanSerialNos, - vanAutoIncColumnName, user); - } - } - if (i > 0) - return "Data successfully synced"; - else - return null; - } + // i = dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, vanSerialNos, + // vanAutoIncColumnName, user); + // } + // } + // if (i > 0) + // return "Data successfully synced"; + // else + // return null; + // } /** * diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index ee5d7812..f4935abc 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -478,10 +478,25 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } } catch (Exception e) { + + String shortReason; +if (e.getMessage() != null) { + if (e.getMessage().contains("duplicate") || e.getMessage().contains("Duplicate")) { + shortReason = "Duplicate entry"; + } else if (e.getMessage().contains("constraint")) { + shortReason = "Constraint violation"; + } else if (e.getMessage().contains("timeout")) { + shortReason = "DB timeout"; + } else { + shortReason = "Insert/Update failed"; + } +} else { + shortReason = "Unknown DB error"; +} for (int k = 0; k < syncDataListInsert.size(); k++) { syncResults.add(new SyncResult(schemaName, syncTableName, String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } } @@ -520,10 +535,24 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } } catch (Exception e) { + String shortReason; +if (e.getMessage() != null) { + if (e.getMessage().contains("duplicate") || e.getMessage().contains("Duplicate")) { + shortReason = "Duplicate entry"; + } else if (e.getMessage().contains("constraint")) { + shortReason = "Constraint violation"; + } else if (e.getMessage().contains("timeout")) { + shortReason = "DB timeout"; + } else { + shortReason = "Insert/Update failed"; + } +} else { + shortReason = "Unknown DB error"; +} for (int k = 0; k < syncDataListUpdate.size(); k++) { syncResults.add(new SyncResult(schemaName, syncTableName, String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + syncUploadDataDigester.getSyncedBy(), false, shortReason )); } } From f9f77a6a403daa581fb6a2765058bb46250f7351 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Tue, 23 Sep 2025 22:00:29 +0530 Subject: [PATCH 35/45] fix: update the response --- .../UploadDataToServerImpl.java | 75 ++++++++++++++----- 1 file changed, 57 insertions(+), 18 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index f230a9c0..179855d5 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -353,27 +353,66 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S List successVanSerialNos = new ArrayList<>(); List failedVanSerialNos = new ArrayList<>(); - if (response != null && response.hasBody()) { - JSONObject obj = new JSONObject(response.getBody()); - if (obj.has("data")) 
{ - JSONObject dataObj = obj.getJSONObject("data"); - if (dataObj.has("records")) { - JSONArray recordsArr = dataObj.getJSONArray("records"); - for (int i = 0; i < recordsArr.length(); i++) { - JSONObject record = recordsArr.getJSONObject(i); - String vanSerialNo = record.getString("vanSerialNo"); - boolean success = record.getBoolean("success"); - if (success) { - successVanSerialNos.add(vanSerialNo); - successCount++; - } else { - failedVanSerialNos.add(vanSerialNo); - failCount++; - } - } + // if (response != null && response.hasBody()) { + // JSONObject obj = new JSONObject(response.getBody()); + // if (obj.has("data")) { + // JSONObject dataObj = obj.getJSONObject("data"); + // if (dataObj.has("records")) { + // JSONArray recordsArr = dataObj.getJSONArray("records"); + // for (int i = 0; i < recordsArr.length(); i++) { + // JSONObject record = recordsArr.getJSONObject(i); + // String vanSerialNo = record.getString("vanSerialNo"); + // boolean success = record.getBoolean("success"); + // if (success) { + // successVanSerialNos.add(vanSerialNo); + // successCount++; + // } else { + // failedVanSerialNos.add(vanSerialNo); + // failCount++; + // } + // } + // } + // } + // } + + if (response != null && response.hasBody()) { + JSONObject obj = new JSONObject(response.getBody()); + if (obj.has("data")) { + JSONObject dataObj = obj.getJSONObject("data"); + if (dataObj.has("records")) { + JSONArray recordsArr = dataObj.getJSONArray("records"); + for (int i = 0; i < recordsArr.length(); i++) { + JSONObject record = recordsArr.getJSONObject(i); + String vanSerialNo = record.getString("vanSerialNo"); + boolean success = record.getBoolean("success"); + if (success) { + successVanSerialNos.add(vanSerialNo); + successCount++; + } else { + failedVanSerialNos.add(vanSerialNo); + failCount++; } } + } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { + // Handle summary response for m_beneficiaryregidmapping + String respMsg = dataObj.optString("response", ""); + int statusCode = obj.optInt("statusCode", 0); + if (respMsg.toLowerCase().contains("success") && statusCode == 200) { + // All records are successful + for (Map map : dataToBesync) { + successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + successCount = successVanSerialNos.size(); + } else { + // All records failed + for (Map map : dataToBesync) { + failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + failCount = failedVanSerialNos.size(); + } } + } +} logger.info("Success Van Serial No="+successVanSerialNos.toString()); logger.info("Failed Van Serial No="+failedVanSerialNos.toString()); From 63385443062e39e8fbef4fb2e153d68d0cbe9f3b Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 10:33:47 +0530 Subject: [PATCH 36/45] fix: update the exception block --- .../UploadDataToServerImpl.java | 60 ++++++++++++++- .../GetDataFromVanAndSyncToDBImpl.java | 76 ++++++++++--------- 2 files changed, 95 insertions(+), 41 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 179855d5..315b3feb 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -375,8 +375,48 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S // } // } - if (response != null && response.hasBody()) { +// if (response != 
null && response.hasBody()) { +// JSONObject obj = new JSONObject(response.getBody()); +// if (obj.has("data")) { +// JSONObject dataObj = obj.getJSONObject("data"); +// if (dataObj.has("records")) { +// JSONArray recordsArr = dataObj.getJSONArray("records"); +// for (int i = 0; i < recordsArr.length(); i++) { +// JSONObject record = recordsArr.getJSONObject(i); +// String vanSerialNo = record.getString("vanSerialNo"); +// boolean success = record.getBoolean("success"); +// if (success) { +// successVanSerialNos.add(vanSerialNo); +// successCount++; +// } else { +// failedVanSerialNos.add(vanSerialNo); +// failCount++; +// } +// } +// } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { +// // Handle summary response for m_beneficiaryregidmapping +// String respMsg = dataObj.optString("response", ""); +// int statusCode = obj.optInt("statusCode", 0); +// if (respMsg.toLowerCase().contains("success") && statusCode == 200) { +// // All records are successful +// for (Map map : dataToBesync) { +// successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); +// } +// successCount = successVanSerialNos.size(); +// } else { +// // All records failed +// for (Map map : dataToBesync) { +// failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); +// } +// failCount = failedVanSerialNos.size(); +// } +// } +// } +// } + +if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); + boolean handled = false; if (obj.has("data")) { JSONObject dataObj = obj.getJSONObject("data"); if (dataObj.has("records")) { @@ -393,23 +433,35 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S failCount++; } } + handled = true; } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - // Handle summary response for m_beneficiaryregidmapping String respMsg = dataObj.optString("response", ""); int statusCode = obj.optInt("statusCode", 0); if (respMsg.toLowerCase().contains("success") && statusCode == 200) { - // All records are successful for (Map map : dataToBesync) { successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); } successCount = successVanSerialNos.size(); } else { - // All records failed for (Map map : dataToBesync) { failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); } failCount = failedVanSerialNos.size(); } + handled = true; + } + } + // Handle unexpected error response (like statusCode 5000) + if (!handled) { + int statusCode = obj.optInt("statusCode", 0); + String errorMsg = obj.optString("errorMessage", "Unknown error"); + if (statusCode >= 5000) { + // Mark all as failed and log error + for (Map map : dataToBesync) { + failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + failCount = failedVanSerialNos.size(); + logger.error("Server error for table {}: {}", tableName, errorMsg); } } } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index f4935abc..15e60029 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -477,28 +477,29 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); } } - } catch (Exception e) { - - String shortReason; 
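// A minimal sketch (assumed helper, not part of this patch): the two catch blocks in
// performGenericTableSync repeat the same message-keyword classification, so the mapping
// could be factored out. The name classifyDbError and its placement are assumptions; the
// keyword checks simply mirror the strings this patch inspects.
private static String classifyDbError(Exception e) {
    String msg = e.getMessage();
    if (msg == null) {
        return "Unknown DB error";
    }
    String lower = msg.toLowerCase();
    if (lower.contains("duplicate")) {
        return "Duplicate entry";      // e.g. unique-key violations
    }
    if (lower.contains("constraint")) {
        return "Constraint violation"; // FK / NOT NULL / check constraints
    }
    if (lower.contains("timeout")) {
        return "DB timeout";           // lock-wait or query timeouts
    }
    return "Insert/Update failed";
}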
-if (e.getMessage() != null) { - if (e.getMessage().contains("duplicate") || e.getMessage().contains("Duplicate")) { - shortReason = "Duplicate entry"; - } else if (e.getMessage().contains("constraint")) { - shortReason = "Constraint violation"; - } else if (e.getMessage().contains("timeout")) { - shortReason = "DB timeout"; + } + catch (Exception e) { + String shortReason; + if (e.getMessage() != null) { + if (e.getMessage().toLowerCase().contains("duplicate")) { + shortReason = "Duplicate entry"; + } else if (e.getMessage().toLowerCase().contains("constraint")) { + shortReason = "Constraint violation"; + } else if (e.getMessage().toLowerCase().contains("timeout")) { + shortReason = "DB timeout"; + } else { + shortReason = "Insert/Update failed"; + } } else { - shortReason = "Insert/Update failed"; + shortReason = "Unknown DB error"; } -} else { - shortReason = "Unknown DB error"; -} - for (int k = 0; k < syncDataListInsert.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, shortReason)); - } + // Always add, never set, to avoid index errors + for (int k = 0; k < syncDataListInsert.size(); k++) { + syncResults.add(new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } +} // try { // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, @@ -534,27 +535,28 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig syncUploadDataDigester.getSyncedBy(), false, "Update failed")); } } - } catch (Exception e) { - String shortReason; -if (e.getMessage() != null) { - if (e.getMessage().contains("duplicate") || e.getMessage().contains("Duplicate")) { - shortReason = "Duplicate entry"; - } else if (e.getMessage().contains("constraint")) { - shortReason = "Constraint violation"; - } else if (e.getMessage().contains("timeout")) { - shortReason = "DB timeout"; + } + catch (Exception e) { + String shortReason; + if (e.getMessage() != null) { + if (e.getMessage().toLowerCase().contains("duplicate")) { + shortReason = "Duplicate entry"; + } else if (e.getMessage().toLowerCase().contains("constraint")) { + shortReason = "Constraint violation"; + } else if (e.getMessage().toLowerCase().contains("timeout")) { + shortReason = "DB timeout"; + } else { + shortReason = "Insert/Update failed"; + } } else { - shortReason = "Insert/Update failed"; + shortReason = "Unknown DB error"; } -} else { - shortReason = "Unknown DB error"; -} - for (int k = 0; k < syncDataListUpdate.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, shortReason )); - } + for (int k = 0; k < syncDataListUpdate.size(); k++) { + syncResults.add(new SyncResult(schemaName, syncTableName, + String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } +} // try { // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, From a0639a878d1682f4629d18b96619bba3f93fc7f6 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 13:31:14 +0530 Subject: [PATCH 37/45] fix: upload fix --- .../UploadDataToServerImpl.java | 519 +++++++++--------- 1 file changed, 258 insertions(+), 261 deletions(-) diff --git 
a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 315b3feb..a9e9aa79 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -89,22 +89,15 @@ public class UploadDataToServerImpl implements UploadDataToServer { @Autowired private CookieUtil cookieUtil; - // batch size for data upload - // private static final int BATCH_SIZE = 30; - /** * * @param groupName * @param Authorization * @return */ - // @Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = { - // Exception.class }) public String getDataToSyncToServer(int vanID, String user, String Authorization, String token) throws Exception { - String syncData = null; syncData = syncIntercepter(vanID, user, Authorization, token); - return syncData; } @@ -114,148 +107,275 @@ public String getDataToSyncToServer(int vanID, String user, String Authorization * @return */ public String syncIntercepter(int vanID, String user, String Authorization, String token) throws Exception { - // sync activity trigger - String serverAcknowledgement = startDataSync(vanID, user, Authorization, token); - return serverAcknowledgement; } /** + * Enhanced startDataSync method with table-level and group-level tracking * * @param syncTableDetailsIDs * @param Authorization * @return */ - private String startDataSync(int vanID, String user, String Authorization, String token) throws Exception { String serverAcknowledgement = null; - List> responseStatus = new ArrayList<>(); - boolean isProgress = false; + List> responseStatus = new ArrayList<>(); boolean hasSyncFailed = false; ObjectMapper objectMapper = new ObjectMapper(); + // fetch group masters List dataSyncGroupList = dataSyncGroupsRepo.findByDeleted(false); logger.debug("Fetched DataSyncGroups: {}", objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(dataSyncGroupList)); + for (DataSyncGroups dataSyncGroups : dataSyncGroupList) { int groupId = dataSyncGroups.getSyncTableGroupID(); + String groupName = dataSyncGroups.getSyncTableGroupName(); // Get group name if available + List syncUtilityClassList = getVanAndServerColumns(groupId); logger.debug("Fetched SyncUtilityClass for groupId {}: {}", groupId, objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncUtilityClassList)); - List> syncData; - List> syncDataBatch; - Map groupIdStatus = new HashMap<>(); + + // Track table-level results for this group + List> tableDetailsList = new ArrayList<>(); + boolean groupHasFailures = false; + for (SyncUtilityClass obj : syncUtilityClassList) { - // if (!isProgress) { + String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); + boolean tableHasError = false; // Move this to the correct scope + // get data from DB to sync to server - syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), obj.getVanColumnName()); + List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), + obj.getVanColumnName()); logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); - // System.out.println(new Gson().toJson(syncData)); + if (syncData != null && syncData.size() > 0) { int dataSize = syncData.size(); int startIndex = 0; int fullBatchCount = dataSize / BATCH_SIZE; int remainder = dataSize % BATCH_SIZE; + // Track table-level success/failure counts + int totalRecords = dataSize; + int successfulRecords = 0; + int failedRecords = 0; + logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); - - for (int i = 0; i < fullBatchCount; i++) { - syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + // Process full batches + for (int i = 0; i < fullBatchCount && !tableHasError; i++) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, BATCH_SIZE); serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, Authorization, token); logger.debug("Server acknowledgement for batch {}: {}", i, serverAcknowledgement); - if (serverAcknowledgement == null || !serverAcknowledgement.contains("success")) { + // Parse the string response from syncDataToServer method + if (serverAcknowledgement == null) { logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), obj.getTableName()); - hasSyncFailed = true; - setResponseStatus(groupIdStatus, groupId, "failed", responseStatus); + tableHasError = true; + failedRecords += syncDataBatch.size(); + groupHasFailures = true; break; + } else if ("Data successfully synced".equals(serverAcknowledgement)) { + successfulRecords += syncDataBatch.size(); + } else if (serverAcknowledgement.startsWith("Partial success:")) { + // Parse "Partial success: X records synced, Y failed" + try { + String[] parts = serverAcknowledgement.split(" "); + int batchSuccess = Integer.parseInt(parts[2]); + int batchFailed = Integer.parseInt(parts[5]); + successfulRecords += batchSuccess; + failedRecords += batchFailed; + } catch (Exception e) { + logger.warn("Could not parse partial success counts for batch {}: {}", i, + serverAcknowledgement); + // Assume half successful, half failed as fallback + successfulRecords += syncDataBatch.size() / 2; + failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); + } + } else if ("Sync failed".equals(serverAcknowledgement)) { + failedRecords += syncDataBatch.size(); + groupHasFailures = true; } startIndex += BATCH_SIZE; } - if (!hasSyncFailed && remainder > 0) { - syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + // Process remainder batch if no error occurred + if (!tableHasError && remainder > 0) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, remainder); serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, Authorization, token); - if (serverAcknowledgement == null || 
!serverAcknowledgement.contains("success")) { + if (serverAcknowledgement == null) { logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), obj.getTableName()); - hasSyncFailed = true; - setResponseStatus(groupIdStatus, groupId, "failed", responseStatus); - break; + failedRecords += syncDataBatch.size(); + groupHasFailures = true; + } else if ("Data successfully synced".equals(serverAcknowledgement)) { + successfulRecords += syncDataBatch.size(); + } else if (serverAcknowledgement.startsWith("Partial success:")) { + try { + String[] parts = serverAcknowledgement.split(" "); + int batchSuccess = Integer.parseInt(parts[2]); + int batchFailed = Integer.parseInt(parts[5]); + successfulRecords += batchSuccess; + failedRecords += batchFailed; + } catch (Exception e) { + logger.warn("Could not parse partial success counts for remainder: {}", + serverAcknowledgement); + successfulRecords += syncDataBatch.size() / 2; + failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); + } + } else if ("Sync failed".equals(serverAcknowledgement)) { + failedRecords += syncDataBatch.size(); + groupHasFailures = true; } } - if (!hasSyncFailed) { - logger.info("Data sync completed for schema: {}, table: {}", obj.getSchemaName(), - obj.getTableName()); - setResponseStatus(groupIdStatus, groupId, "completed", responseStatus); + // Determine table status based on success/failure counts + String tableStatus; + if (successfulRecords == totalRecords && failedRecords == 0) { + tableStatus = "success"; + } else if (failedRecords == totalRecords && successfulRecords == 0) { + tableStatus = "failed"; + groupHasFailures = true; + } else if (successfulRecords > 0 && failedRecords > 0) { + tableStatus = "partial"; + } else { + tableStatus = "failed"; // Default to failed if unclear + groupHasFailures = true; } + + // Create detailed table info + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", tableStatus); + tableDetails.put("totalRecords", totalRecords); + tableDetails.put("successfulRecords", successfulRecords); + tableDetails.put("failedRecords", failedRecords); + tableDetailsList.add(tableDetails); + + logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {})", + tableKey, tableStatus, successfulRecords, failedRecords, totalRecords); + } else { logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); - setResponseStatus(groupIdStatus, groupId, "completed", responseStatus); + + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", "no_data"); + tableDetails.put("totalRecords", 0); + tableDetails.put("successfulRecords", 0); + tableDetails.put("failedRecords", 0); + tableDetailsList.add(tableDetails); } - if (hasSyncFailed) { - // Mark all subsequent groups as "pending" - for (DataSyncGroups remainingGroup : dataSyncGroupList - .subList(dataSyncGroupList.indexOf(dataSyncGroups) + 1, dataSyncGroupList.size())) { - Map pendingGroupIdStatus = new HashMap<>(); - pendingGroupIdStatus.put("groupId", String.valueOf(remainingGroup.getSyncTableGroupID())); - pendingGroupIdStatus.put("status", "pending"); - responseStatus.add(pendingGroupIdStatus); - } + // If this table had critical failures, stop processing this group + if (tableHasError) { + hasSyncFailed = true; break; } } + + // 
Determine overall group status + String groupStatus; + long successTables = tableDetailsList.stream() + .filter(table -> "success".equals(table.get("status")) || "no_data".equals(table.get("status"))) + .count(); + long partialTables = tableDetailsList.stream() + .filter(table -> "partial".equals(table.get("status"))) + .count(); + long failedTables = tableDetailsList.stream() + .filter(table -> "failed".equals(table.get("status"))) + .count(); + + if (failedTables == 0 && partialTables == 0) { + groupStatus = "completed"; + } else if (failedTables > 0 && successTables == 0 && partialTables == 0) { + groupStatus = "failed"; + } else { + groupStatus = "partial"; + } + + // Create group response + Map groupResponse = new HashMap<>(); + groupResponse.put("groupId", groupId); + groupResponse.put("groupName", groupName != null ? groupName : "Group " + groupId); + groupResponse.put("status", groupStatus); + groupResponse.put("tables", tableDetailsList); + groupResponse.put("summary", Map.of( + "totalTables", tableDetailsList.size(), + "successfulTables", successTables, + "partialTables", partialTables, + "failedTables", failedTables)); + + responseStatus.add(groupResponse); + + if (hasSyncFailed) { + // Mark all subsequent groups as "pending" + for (int j = dataSyncGroupList.indexOf(dataSyncGroups) + 1; j < dataSyncGroupList.size(); j++) { + DataSyncGroups remainingGroup = dataSyncGroupList.get(j); + Map pendingGroupResponse = new HashMap<>(); + pendingGroupResponse.put("groupId", remainingGroup.getSyncTableGroupID()); + pendingGroupResponse.put("groupName", + remainingGroup.getSyncTableGroupName() != null ? remainingGroup.getSyncTableGroupName() + : "Group " + remainingGroup.getSyncTableGroupID()); + pendingGroupResponse.put("status", "pending"); + pendingGroupResponse.put("tables", new ArrayList<>()); + pendingGroupResponse.put("summary", Map.of( + "totalTables", 0, + "successfulTables", 0L, + "partialTables", 0L, + "failedTables", 0L)); + responseStatus.add(pendingGroupResponse); + } + break; + } } + // Create final response + Map finalResponse = new HashMap<>(); if (hasSyncFailed) { - Map response = new HashMap<>(); - response.put("response", "Data sync failed"); - response.put("groupsProgress", responseStatus); - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response); + finalResponse.put("response", "Data sync failed"); + finalResponse.put("groupsProgress", responseStatus); return objectMapper.writerWithDefaultPrettyPrinter() - .writeValueAsString(Collections.singletonMap("data", response)); + .writeValueAsString(Collections.singletonMap("data", finalResponse)); } else { - if ("No data to sync".equals(serverAcknowledgement)) { - return serverAcknowledgement; + // Check if there was any data to sync + boolean hasData = responseStatus.stream() + .anyMatch(group -> { + @SuppressWarnings("unchecked") + List> tables = (List>) ((Map) group) + .get("tables"); + return tables.stream().anyMatch(table -> !("no_data".equals(table.get("status")))); + }); + + if (!hasData) { + return "No data to sync"; } else { - return "Data successfully synced"; + finalResponse.put("response", "Data sync completed"); + finalResponse.put("groupsProgress", responseStatus); + return objectMapper.writerWithDefaultPrettyPrinter() + .writeValueAsString(Collections.singletonMap("data", finalResponse)); } } } - private void setResponseStatus(Map groupIdStatus, int groupId, String serverAcknowledgement, - List> responseStatus) { - groupIdStatus.put("groupId", String.valueOf(groupId)); - 
groupIdStatus.put("status", serverAcknowledgement); - responseStatus.add(groupIdStatus); - } - - /** - * - * @param syncTableDetailsIDs - * @return - */ - private List getVanAndServerColumns(Integer groupID) throws Exception { List syncUtilityClassList = getVanAndServerColumnList(groupID); logger.debug("Fetched SyncUtilityClass list for groupID {}: {}", groupID, syncUtilityClassList); - return syncUtilityClassList; } @@ -266,23 +386,13 @@ public List getVanAndServerColumnList(Integer groupID) throws return syncUtilityClassList; } - /** - * - * @param schemaName - * @param tableName - * @param columnNames - * @return - */ - private List> getDataToSync(String schemaName, String tableName, String columnNames) throws Exception { - logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); + logger.info("Fetching data to sync for schema: {}, table: {}, columns: {}", schemaName, tableName, columnNames); List> resultSetList = dataSyncRepository.getDataForGivenSchemaAndTable(schemaName, tableName, columnNames); if (resultSetList != null) { logger.debug("Fetched {} records for schema '{}', table '{}'", resultSetList.size(), schemaName, tableName); - // Optionally log a sample of the resultSetList for verification (be careful - // with large datasets) if (!resultSetList.isEmpty()) { logger.debug("Sample record: {}", resultSetList.get(0)); } @@ -292,14 +402,6 @@ private List> getDataToSync(String schemaName, String tableN return resultSetList; } - /** - * - * @param syncData - * @param startIndex - * @param size - * @return - */ - private List> getBatchOfAskedSizeDataToSync(List> syncData, int startIndex, int size) throws Exception { List> syncDataOfBatchSize = syncData.subList(startIndex, (startIndex + size)); @@ -307,23 +409,13 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, - String token) - throws Exception { + String token) throws Exception { RestTemplate restTemplate = new RestTemplate(); - Integer facilityID = masterVanRepo.getFacilityID(vanID); // serialize null @@ -342,171 +434,77 @@ public String syncDataToServer(int vanID, String schemaName, String tableName, S dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - logger.info("Request obj="+requestOBJ); + logger.info("Request obj=" + requestOBJ); HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); -logger.info("Response for thes erver="+response); -logger.info("Response body="+response.getBody()); - int successCount = 0; - int failCount = 0; - List successVanSerialNos = new ArrayList<>(); - List failedVanSerialNos = new ArrayList<>(); - - // if (response != null && response.hasBody()) { - // JSONObject obj = new JSONObject(response.getBody()); - // if (obj.has("data")) { - // JSONObject dataObj = obj.getJSONObject("data"); - // if (dataObj.has("records")) { - // JSONArray recordsArr = dataObj.getJSONArray("records"); - // for (int i = 0; i < recordsArr.length(); i++) { - // JSONObject record = recordsArr.getJSONObject(i); - // String vanSerialNo = record.getString("vanSerialNo"); - // boolean success = record.getBoolean("success"); - // if (success) { - // successVanSerialNos.add(vanSerialNo); - // successCount++; - // } else { - // failedVanSerialNos.add(vanSerialNo); - // failCount++; - // } - // } - // } - // } - // } - -// if (response != null && 
response.hasBody()) { -// JSONObject obj = new JSONObject(response.getBody()); -// if (obj.has("data")) { -// JSONObject dataObj = obj.getJSONObject("data"); -// if (dataObj.has("records")) { -// JSONArray recordsArr = dataObj.getJSONArray("records"); -// for (int i = 0; i < recordsArr.length(); i++) { -// JSONObject record = recordsArr.getJSONObject(i); -// String vanSerialNo = record.getString("vanSerialNo"); -// boolean success = record.getBoolean("success"); -// if (success) { -// successVanSerialNos.add(vanSerialNo); -// successCount++; -// } else { -// failedVanSerialNos.add(vanSerialNo); -// failCount++; -// } -// } -// } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { -// // Handle summary response for m_beneficiaryregidmapping -// String respMsg = dataObj.optString("response", ""); -// int statusCode = obj.optInt("statusCode", 0); -// if (respMsg.toLowerCase().contains("success") && statusCode == 200) { -// // All records are successful -// for (Map map : dataToBesync) { -// successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); -// } -// successCount = successVanSerialNos.size(); -// } else { -// // All records failed -// for (Map map : dataToBesync) { -// failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); -// } -// failCount = failedVanSerialNos.size(); -// } -// } -// } -// } - -if (response != null && response.hasBody()) { - JSONObject obj = new JSONObject(response.getBody()); - boolean handled = false; - if (obj.has("data")) { - JSONObject dataObj = obj.getJSONObject("data"); - if (dataObj.has("records")) { - JSONArray recordsArr = dataObj.getJSONArray("records"); - for (int i = 0; i < recordsArr.length(); i++) { - JSONObject record = recordsArr.getJSONObject(i); - String vanSerialNo = record.getString("vanSerialNo"); - boolean success = record.getBoolean("success"); - if (success) { - successVanSerialNos.add(vanSerialNo); - successCount++; - } else { - failedVanSerialNos.add(vanSerialNo); - failCount++; - } - } - handled = true; - } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - String respMsg = dataObj.optString("response", ""); - int statusCode = obj.optInt("statusCode", 0); - if (respMsg.toLowerCase().contains("success") && statusCode == 200) { - for (Map map : dataToBesync) { - successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - } - successCount = successVanSerialNos.size(); - } else { - for (Map map : dataToBesync) { - failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - } - failCount = failedVanSerialNos.size(); - } - handled = true; - } - } - // Handle unexpected error response (like statusCode 5000) - if (!handled) { - int statusCode = obj.optInt("statusCode", 0); - String errorMsg = obj.optString("errorMessage", "Unknown error"); - if (statusCode >= 5000) { - // Mark all as failed and log error - for (Map map : dataToBesync) { - failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - } - failCount = failedVanSerialNos.size(); - logger.error("Server error for table {}: {}", tableName, errorMsg); - } - } -} - -logger.info("Success Van Serial No="+successVanSerialNos.toString()); -logger.info("Failed Van Serial No="+failedVanSerialNos.toString()); - // Update processed flag for success and failed vanSerialNos - if (!successVanSerialNos.isEmpty()) { - dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, - vanAutoIncColumnName, user, "P"); - } - if (!failedVanSerialNos.isEmpty()) { - 
dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, - vanAutoIncColumnName, user, "F"); - } - - if (successCount > 0 && failCount == 0) - return "Data successfully synced"; - else if (successCount > 0 && failCount > 0) - return "Partial success: " + successCount + " records synced, " + failCount + " failed"; - else - return "Sync failed"; - } - // int i = 0; - // if (response != null && response.hasBody()) { - // JSONObject obj = new JSONObject(response.getBody()); - // if (obj != null && obj.has("statusCode") && obj.getInt("statusCode") == 200) { - // StringBuilder vanSerialNos = getVanSerialNoListForSyncedData(vanAutoIncColumnName, dataToBesync); - - // i = dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, vanSerialNos, - // vanAutoIncColumnName, user); - // } - // } - // if (i > 0) - // return "Data successfully synced"; - // else - // return null; - // } + logger.info("Response for the server=" + response); + logger.info("Response body=" + response.getBody()); + + int successCount = 0; + int failCount = 0; + List successVanSerialNos = new ArrayList<>(); + List failedVanSerialNos = new ArrayList<>(); + + if (response != null && response.hasBody()) { + JSONObject obj = new JSONObject(response.getBody()); + if (obj.has("data")) { + JSONObject dataObj = obj.getJSONObject("data"); + if (dataObj.has("records")) { + JSONArray recordsArr = dataObj.getJSONArray("records"); + for (int i = 0; i < recordsArr.length(); i++) { + JSONObject record = recordsArr.getJSONObject(i); + String vanSerialNo = record.getString("vanSerialNo"); + boolean success = record.getBoolean("success"); + if (success) { + successVanSerialNos.add(vanSerialNo); + successCount++; + } else { + failedVanSerialNos.add(vanSerialNo); + failCount++; + } + } + } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { + // Handle summary response for m_beneficiaryregidmapping + String respMsg = dataObj.optString("response", ""); + int statusCode = obj.optInt("statusCode", 0); + if (respMsg.toLowerCase().contains("success") && statusCode == 200) { + // All records are successful + for (Map map : dataToBesync) { + successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + successCount = successVanSerialNos.size(); + } else { + // All records failed + for (Map map : dataToBesync) { + failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + failCount = failedVanSerialNos.size(); + } + } + } + } - /** - * - * @param vanAutoIncColumnName - * @param dataToBesync - * @return - */ + logger.info("Success Van Serial No=" + successVanSerialNos.toString()); + logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); + + // Update processed flag for success and failed vanSerialNos + if (!successVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, + vanAutoIncColumnName, user, "P"); + } + if (!failedVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, + vanAutoIncColumnName, user, "F"); + } + + if (successCount > 0 && failCount == 0) + return "Data successfully synced"; + else if (successCount > 0 && failCount > 0) + return "Partial success: " + successCount + " records synced, " + failCount + " failed"; + else + return "Sync failed"; + } public StringBuilder getVanSerialNoListForSyncedData(String vanAutoIncColumnName, List> dataToBesync) throws Exception { @@ -532,5 +530,4 @@ public String 
getDataSyncGroupDetails() { else return null; } - -} +} \ No newline at end of file From 261e9a09a98d20141774c4869d126275e0928e01 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 14:18:03 +0530 Subject: [PATCH 38/45] fix: address duplication issue --- .../GetDataFromVanAndSyncToDBImpl.java | 731 ++++++++++++------ 1 file changed, 503 insertions(+), 228 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 15e60029..0ee9f25d 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -313,273 +313,548 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN * handlers. */ - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - // List syncResults = new ArrayList<>(); - - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; // Nothing to sync, consider it a success - } +// private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { +// List> dataToBesync = syncUploadDataDigester.getSyncData(); +// List syncDataListInsert = new ArrayList<>(); +// List syncDataListUpdate = new ArrayList<>(); +// // List syncResults = new ArrayList<>(); + +// boolean overallSuccess = true; + +// if (dataToBesync == null || dataToBesync.isEmpty()) { +// logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); +// return true; // Nothing to sync, consider it a success +// } + +// String syncTableName = syncUploadDataDigester.getTableName(); +// String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); +// String schemaName = syncUploadDataDigester.getSchemaName(); +// Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); +// String serverColumns = syncUploadDataDigester.getServerColumns(); + +// int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + +// List serverColumnsList = Arrays.asList(serverColumns.split(",")); + +// for (Map map : dataToBesync) { +// // Create a new map with clean column names as keys +// Map cleanRecord = new HashMap<>(); +// for (String key : map.keySet()) { +// String cleanKey = key; +// // Handle keys with SQL functions like date_format +// if (key.startsWith("date_format(") && key.endsWith(")")) { +// int start = key.indexOf("(") + 1; +// int end = key.indexOf(","); +// if (end > start) { +// cleanKey = key.substring(start, end).trim(); +// } else { +// // Fallback if format is unexpected +// cleanKey = key.substring(start, key.indexOf(")")).trim(); +// } +// } +// cleanRecord.put(cleanKey.trim(), map.get(key)); +// } + +// String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); +// String vanID = String.valueOf(cleanRecord.get("VanID")); +// int syncFacilityID = 0; + +// // Update SyncedBy and SyncedDate in the xmap itself before processing +// cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); +// cleanRecord.put("SyncedDate", 
String.valueOf(LocalDateTime.now())); + +// if (facilityIDFromDigester != null) { +// // Determine the 'Processed' status based on facility ID for specific tables +// switch (syncTableName.toLowerCase()) { +// case "t_indent": +// case "t_indentorder": { +// if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { +// Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); +// if (fromFacilityID.intValue() == facilityIDFromDigester) { +// cleanRecord.put("Processed", "P"); +// } +// } +// break; +// } +// case "t_indentissue": { +// if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { +// Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); +// if (toFacilityID.intValue() == facilityIDFromDigester) { +// cleanRecord.put("Processed", "P"); +// } +// } +// break; +// } +// case "t_stocktransfer": { +// if (cleanRecord.containsKey("TransferToFacilityID") +// && cleanRecord.get("TransferToFacilityID") instanceof Number) { +// Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); +// if (transferToFacilityID.intValue() == facilityIDFromDigester) { +// cleanRecord.put("Processed", "P"); +// } +// } +// break; +// } +// case "t_itemstockentry": { +// if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { +// Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); +// if (mapFacilityID.intValue() == facilityIDFromDigester) { +// cleanRecord.put("Processed", "P"); +// } +// } +// break; +// } +// default: +// // No specific facility ID logic for other tables +// break; +// } +// } + +// // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot +// if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { +// syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); +// } + +// int recordCheck; +// try { +// recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( +// schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); +// logger.info("Test Record check="+recordCheck); +// logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); +// } catch (Exception e) { +// logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", +// syncTableName, vanSerialNo, vanID, e.getMessage(), e); +// return false; // Critical error, stop sync for this table +// } + +// // Prepare Object array for insert/update +// List currentRecordValues = new ArrayList<>(); +// for (String column : serverColumnsList) { +// Object value = cleanRecord.get(column.trim()); +// if (value instanceof Boolean) { +// currentRecordValues.add(value); +// } else if (value != null) { +// currentRecordValues.add(String.valueOf(value)); +// } else { +// currentRecordValues.add(null); +// } +// } + +// Object[] objArr = currentRecordValues.toArray(); +// logger.info("Test Obj Arr {}",objArr); +// if (recordCheck == 0) { +// syncDataListInsert.add(objArr); +// syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); +// } else { +// // For update, append the WHERE clause parameters at the end of the array +// List updateParams = new ArrayList<>(Arrays.asList(objArr)); +// updateParams.add(String.valueOf(vanSerialNo)); + +// if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", +// "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", +// "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") +// .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { +// updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); +// } else { +// updateParams.add(String.valueOf(vanID)); +// } +// syncDataListUpdate.add(updateParams.toArray()); +// syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); + +// } +// } + +// boolean insertSuccess = true; +// boolean updateSuccess = true; + +// if (!syncDataListInsert.isEmpty()) { +// String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); +// logger.info("Query Insert="+queryInsert); +// try { +// int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); +// for (int k = 0; k < i.length; k++) { +// if (i[k] < 1) { +// syncResults.set(k, new SyncResult(schemaName, syncTableName, +// String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position +// syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); +// } +// } +// } +// catch (Exception e) { +// String shortReason; +// if (e.getMessage() != null) { +// if (e.getMessage().toLowerCase().contains("duplicate")) { +// shortReason = "Duplicate entry"; +// } else if (e.getMessage().toLowerCase().contains("constraint")) { +// shortReason = "Constraint violation"; +// } else if (e.getMessage().toLowerCase().contains("timeout")) { +// shortReason = "DB timeout"; +// } else { +// shortReason = "Insert/Update failed"; +// } +// } else { +// shortReason = "Unknown DB error"; +// } +// // Always add, never set, to avoid index errors +// for (int k = 0; k < syncDataListInsert.size(); k++) { +// syncResults.add(new SyncResult(schemaName, syncTableName, +// String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), +// syncUploadDataDigester.getSyncedBy(), false, shortReason)); +// } +// } + +// // try { +// // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, +// // serverColumns, queryInsert, syncDataListInsert); +// // logger.info("Insert result array length: {}", i.length); +// // logger.info("Expected insert size: {}", syncDataListInsert.size()); 
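// Hedged aside, not part of the patch: JdbcTemplate.batchUpdate may return
// java.sql.Statement.SUCCESS_NO_INFO (-2) for rows the driver did execute successfully
// in a batch, so the "results[k] < 1" / "i[k] < 1" checks used in this class can
// over-report failures with such drivers. Under that assumption, a stricter per-row
// check would be:
//     boolean rowFailed = results[k] == java.sql.Statement.EXECUTE_FAILED  // -3
//             || results[k] == 0;                                          // no row affected
// SUCCESS_NO_INFO would then be counted as success rather than failure.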
+// // if (i.length != syncDataListInsert.size()) { +// // insertSuccess = false; +// // logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", +// // syncTableName, syncDataListInsert.size(), i.length, +// // getFailedRecords(i, syncDataListInsert)); +// // } else { +// // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); +// // } +// // } catch (Exception e) { +// // insertSuccess = false; +// // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); +// // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); +// // } +// } + +// if (!syncDataListUpdate.isEmpty()) { +// String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); +// logger.info("Query Update="+queryUpdate); +// try { +// int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); +// logger.info("Test Update result array length: {}", i.length); +// logger.info("Test Expected update size: {}", syncDataListUpdate.size()); +// for (int k = 0; k < i.length; k++) { +// if (i[k] < 1) { +// syncResults.set(k, new SyncResult(schemaName, syncTableName, +// String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position +// syncUploadDataDigester.getSyncedBy(), false, "Update failed")); +// } +// } +// } +// catch (Exception e) { +// String shortReason; +// if (e.getMessage() != null) { +// if (e.getMessage().toLowerCase().contains("duplicate")) { +// shortReason = "Duplicate entry"; +// } else if (e.getMessage().toLowerCase().contains("constraint")) { +// shortReason = "Constraint violation"; +// } else if (e.getMessage().toLowerCase().contains("timeout")) { +// shortReason = "DB timeout"; +// } else { +// shortReason = "Insert/Update failed"; +// } +// } else { +// shortReason = "Unknown DB error"; +// } +// for (int k = 0; k < syncDataListUpdate.size(); k++) { +// syncResults.add(new SyncResult(schemaName, syncTableName, +// String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), +// syncUploadDataDigester.getSyncedBy(), false, shortReason)); +// } +// } + +// // try { +// // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, +// // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); +// // logger.info("Update result array length: {}", j.length); +// // logger.info("Expected update size: {}", syncDataListUpdate.size()); +// // if (j.length != syncDataListUpdate.size()) { +// // updateSuccess = false; +// // logger.error("Partial update for table {}. Expected {} updates, got {}. 
Failed records: {}", +// // syncTableName, syncDataListUpdate.size(), j.length, +// // getFailedRecords(j, syncDataListUpdate)); +// // } else { +// // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); +// // } +// // } catch (Exception e) { +// // updateSuccess = false; +// // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); +// // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); +// // } +// } +// logger.info("Sync results for table {}: {}", syncTableName, syncResults); +// return insertSuccess && updateSuccess; +// } + +private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); + + // Track indices for insert and update operations + Map insertIndexMap = new HashMap<>(); // syncResults index -> insert list index + Map updateIndexMap = new HashMap<>(); // syncResults index -> update list index + + boolean overallSuccess = true; + + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; + } - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); - String serverColumns = syncUploadDataDigester.getServerColumns(); + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); - - List serverColumnsList = Arrays.asList(serverColumns.split(",")); - - for (Map map : dataToBesync) { - // Create a new map with clean column names as keys - Map cleanRecord = new HashMap<>(); - for (String key : map.keySet()) { - String cleanKey = key; - // Handle keys with SQL functions like date_format - if (key.startsWith("date_format(") && key.endsWith(")")) { - int start = key.indexOf("(") + 1; - int end = key.indexOf(","); - if (end > start) { - cleanKey = key.substring(start, end).trim(); - } else { - // Fallback if format is unexpected - cleanKey = key.substring(start, key.indexOf(")")).trim(); - } + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + cleanKey = key.substring(start, key.indexOf(")")).trim(); } - cleanRecord.put(cleanKey.trim(), map.get(key)); } + cleanRecord.put(cleanKey.trim(), map.get(key)); + } - String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); - String vanID = 
String.valueOf(cleanRecord.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the xmap itself before processing - cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { - Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); - } + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); + String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the cleanRecord + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_indentissue": { - if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { - Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); - } + break; + } + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_stocktransfer": { - if (cleanRecord.containsKey("TransferToFacilityID") - && cleanRecord.get("TransferToFacilityID") instanceof Number) { - Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); - } + break; + } + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - case "t_itemstockentry": { - if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { - Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); - } + break; + } + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if (mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); } - break; } - default: - // No 
specific facility ID logic for other tables - break; + break; } + default: + break; } + } + + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { - syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + logger.info("Record check result: {}", recordCheck); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); + + // Add failed result for this record + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, "Record check failed")); + continue; // Skip to next record + } + + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); } + } - int recordCheck; - try { - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - logger.info("Test Record check="+recordCheck); - logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - return false; // Critical error, stop sync for this table + Object[] objArr = currentRecordValues.toArray(); + + // Add to syncResults first, then track the index + int currentSyncResultIndex = syncResults.size(); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), true, null)); // Initially set as success + + if (recordCheck == 0) { + // Record doesn't exist - INSERT + insertIndexMap.put(currentSyncResultIndex, syncDataListInsert.size()); + syncDataListInsert.add(objArr); + } else { + // Record exists - UPDATE + List updateParams = new ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); } + + updateIndexMap.put(currentSyncResultIndex, syncDataListUpdate.size()); + syncDataListUpdate.add(updateParams.toArray()); + } + } - // Prepare Object array for insert/update - List currentRecordValues = new ArrayList<>(); - for (String column : serverColumnsList) { - Object value = cleanRecord.get(column.trim()); - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); + boolean insertSuccess = true; + boolean updateSuccess = true; + + // Process INSERT operations + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + logger.info("Insert Query: {}", queryInsert); + + try { + int[] insertResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryInsert, syncDataListInsert); + + // Update syncResults based on insert results + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); + + if (insertListIndex < insertResults.length && insertResults[insertListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully inserted record at index {}", insertListIndex); } else { - currentRecordValues.add(null); + // Failed - update the syncResults entry + String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); + insertSuccess = false; } } + + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + + String shortReason = getShortErrorReason(e); + + // Update all insert-related syncResults to failed + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); + String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, shortReason)); + } + } + } - 
Object[] objArr = currentRecordValues.toArray(); - logger.info("Test Obj Arr {}",objArr); - if (recordCheck == 0) { - syncDataListInsert.add(objArr); - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); - } else { - // For update, append the WHERE clause parameters at the end of the array - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + // Process UPDATE operations + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + logger.info("Update Query: {}", queryUpdate); + + try { + int[] updateResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryUpdate, syncDataListUpdate); + + // Update syncResults based on update results + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); + + if (updateListIndex < updateResults.length && updateResults[updateListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully updated record at index {}", updateListIndex); } else { - updateParams.add(String.valueOf(vanID)); + // Failed - update the syncResults entry + String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, "Update failed")); + updateSuccess = false; } - syncDataListUpdate.add(updateParams.toArray()); - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); - } - } - - boolean insertSuccess = true; - boolean updateSuccess = true; - - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); - logger.info("Query Insert="+queryInsert); - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryInsert, syncDataListInsert); - for (int k = 0; k < i.length; k++) { - if (i[k] < 1) { - syncResults.set(k, new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position - syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); + + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + + String shortReason = getShortErrorReason(e); + + // Update all update-related syncResults to failed + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); + String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } } - } - catch (Exception e) { - String 
shortReason; - if (e.getMessage() != null) { - if (e.getMessage().toLowerCase().contains("duplicate")) { - shortReason = "Duplicate entry"; - } else if (e.getMessage().toLowerCase().contains("constraint")) { - shortReason = "Constraint violation"; - } else if (e.getMessage().toLowerCase().contains("timeout")) { - shortReason = "DB timeout"; - } else { - shortReason = "Insert/Update failed"; - } - } else { - shortReason = "Unknown DB error"; } - // Always add, never set, to avoid index errors - for (int k = 0; k < syncDataListInsert.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, shortReason)); - } -} - // try { - // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - // serverColumns, queryInsert, syncDataListInsert); - // logger.info("Insert result array length: {}", i.length); - // logger.info("Expected insert size: {}", syncDataListInsert.size()); - // if (i.length != syncDataListInsert.size()) { - // insertSuccess = false; - // logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", - // syncTableName, syncDataListInsert.size(), i.length, - // getFailedRecords(i, syncDataListInsert)); - // } else { - // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); - // } - // } catch (Exception e) { - // insertSuccess = false; - // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); - // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); - // } - } + logger.info("Sync results for table {}: {}", syncTableName, syncResults); + return insertSuccess && updateSuccess; +} - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); - logger.info("Query Update="+queryUpdate); - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); - logger.info("Test Update result array length: {}", i.length); - logger.info("Test Expected update size: {}", syncDataListUpdate.size()); - for (int k = 0; k < i.length; k++) { - if (i[k] < 1) { - syncResults.set(k, new SyncResult(schemaName, syncTableName, - String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position - syncUploadDataDigester.getSyncedBy(), false, "Update failed")); - } - } - } - catch (Exception e) { - String shortReason; +// Helper method to get short error reason +private String getShortErrorReason(Exception e) { if (e.getMessage() != null) { - if (e.getMessage().toLowerCase().contains("duplicate")) { - shortReason = "Duplicate entry"; - } else if (e.getMessage().toLowerCase().contains("constraint")) { - shortReason = "Constraint violation"; - } else if (e.getMessage().toLowerCase().contains("timeout")) { - shortReason = "DB timeout"; + String message = e.getMessage().toLowerCase(); + if (message.contains("duplicate")) { + return "Duplicate entry"; + } else if (message.contains("constraint")) { + return "Constraint violation"; + } else if (message.contains("timeout")) { + return "DB timeout"; } else { - shortReason = "Insert/Update failed"; + return "Database error"; } } else { - shortReason = "Unknown DB error"; - } - for (int k = 0; k < syncDataListUpdate.size(); k++) { - syncResults.add(new SyncResult(schemaName, syncTableName, - 
String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), - syncUploadDataDigester.getSyncedBy(), false, shortReason)); + return "Unknown error"; } } - - // try { - // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - // logger.info("Update result array length: {}", j.length); - // logger.info("Expected update size: {}", syncDataListUpdate.size()); - // if (j.length != syncDataListUpdate.size()) { - // updateSuccess = false; - // logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", - // syncTableName, syncDataListUpdate.size(), j.length, - // getFailedRecords(j, syncDataListUpdate)); - // } else { - // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); - // } - // } catch (Exception e) { - // updateSuccess = false; - // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); - // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); - // } - } - logger.info("Sync results for table {}: {}", syncTableName, syncResults); - return insertSuccess && updateSuccess; - } private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { String[] columnsArr = null; From 2ce5163a0fc595a7a4725e55291560e7d9278bd8 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 19:38:05 +0530 Subject: [PATCH 39/45] fix: datasync issue --- .../dataSyncActivity/DataSyncRepository.java | 8 +- .../UploadDataToServerImpl.java | 749 ++++++++++++------ .../GetDataFromVanAndSyncToDBImpl.java | 4 +- 3 files changed, 517 insertions(+), 244 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java index c48ea001..bc927104 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/DataSyncRepository.java @@ -99,7 +99,7 @@ public List> getDataForGivenSchemaAndTable(String schema, St } public int updateProcessedFlagInVan(String schemaName, String tableName, List vanSerialNos, - String autoIncreamentColumn, String user, String status) throws Exception { + String autoIncreamentColumn, String user, String status, String reason) throws Exception { jdbcTemplate = getJdbcTemplate(); String query = ""; @@ -107,16 +107,16 @@ public int updateProcessedFlagInVan(String schemaName, String tableName, List> tableDetailsList = new ArrayList<>(); boolean groupHasFailures = false; - for (SyncUtilityClass obj : syncUtilityClassList) { - String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); - boolean tableHasError = false; // Move this to the correct scope - - // get data from DB to sync to server - List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), - obj.getVanColumnName()); - logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); - - if (syncData != null && syncData.size() > 0) { - int dataSize = syncData.size(); - int startIndex = 0; - int fullBatchCount = dataSize / BATCH_SIZE; - int remainder = dataSize % BATCH_SIZE; - - // Track table-level success/failure counts - int totalRecords = dataSize; - int successfulRecords = 0; - int failedRecords = 0; - - logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", - obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); - - // Process full batches - for (int i = 0; i < fullBatchCount && !tableHasError; i++) { - List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - BATCH_SIZE); - serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - Authorization, token); - logger.debug("Server acknowledgement for batch {}: {}", i, serverAcknowledgement); - - // Parse the string response from syncDataToServer method - if (serverAcknowledgement == null) { - logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), - obj.getTableName()); - tableHasError = true; - failedRecords += syncDataBatch.size(); - groupHasFailures = true; - break; - } else if ("Data successfully synced".equals(serverAcknowledgement)) { - successfulRecords += syncDataBatch.size(); - } else if (serverAcknowledgement.startsWith("Partial success:")) { - // Parse "Partial success: X records synced, Y failed" - try { - String[] parts = serverAcknowledgement.split(" "); - int batchSuccess = Integer.parseInt(parts[2]); - int batchFailed = Integer.parseInt(parts[5]); - successfulRecords += batchSuccess; - failedRecords += batchFailed; - } catch (Exception e) { - logger.warn("Could not parse partial success counts for batch {}: {}", i, - serverAcknowledgement); - // Assume half successful, half failed as fallback - successfulRecords += syncDataBatch.size() / 2; - failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); - } - } else if ("Sync failed".equals(serverAcknowledgement)) { - failedRecords += syncDataBatch.size(); - groupHasFailures = true; - } - - startIndex += BATCH_SIZE; - } - - // Process remainder batch if no error occurred - if (!tableHasError && remainder > 0) { - List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - remainder); - serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - Authorization, token); - - if (serverAcknowledgement == null) { - logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), - obj.getTableName()); - failedRecords += syncDataBatch.size(); - groupHasFailures = true; - } else if ("Data successfully synced".equals(serverAcknowledgement)) { - successfulRecords += syncDataBatch.size(); - } else if (serverAcknowledgement.startsWith("Partial success:")) { - try { - String[] parts = serverAcknowledgement.split(" "); - int batchSuccess = Integer.parseInt(parts[2]); - int 
batchFailed = Integer.parseInt(parts[5]); - successfulRecords += batchSuccess; - failedRecords += batchFailed; - } catch (Exception e) { - logger.warn("Could not parse partial success counts for remainder: {}", - serverAcknowledgement); - successfulRecords += syncDataBatch.size() / 2; - failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); - } - } else if ("Sync failed".equals(serverAcknowledgement)) { - failedRecords += syncDataBatch.size(); - groupHasFailures = true; - } - } - - // Determine table status based on success/failure counts - String tableStatus; - if (successfulRecords == totalRecords && failedRecords == 0) { - tableStatus = "success"; - } else if (failedRecords == totalRecords && successfulRecords == 0) { - tableStatus = "failed"; - groupHasFailures = true; - } else if (successfulRecords > 0 && failedRecords > 0) { - tableStatus = "partial"; - } else { - tableStatus = "failed"; // Default to failed if unclear - groupHasFailures = true; - } - - // Create detailed table info - Map tableDetails = new HashMap<>(); - tableDetails.put("tableName", obj.getTableName()); - tableDetails.put("schemaName", obj.getSchemaName()); - tableDetails.put("status", tableStatus); - tableDetails.put("totalRecords", totalRecords); - tableDetails.put("successfulRecords", successfulRecords); - tableDetails.put("failedRecords", failedRecords); - tableDetailsList.add(tableDetails); - - logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {})", - tableKey, tableStatus, successfulRecords, failedRecords, totalRecords); - - } else { - logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); - - Map tableDetails = new HashMap<>(); - tableDetails.put("tableName", obj.getTableName()); - tableDetails.put("schemaName", obj.getSchemaName()); - tableDetails.put("status", "no_data"); - tableDetails.put("totalRecords", 0); - tableDetails.put("successfulRecords", 0); - tableDetails.put("failedRecords", 0); - tableDetailsList.add(tableDetails); - } - - // If this table had critical failures, stop processing this group - if (tableHasError) { - hasSyncFailed = true; - break; - } - } - + // for (SyncUtilityClass obj : syncUtilityClassList) { + // String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); + // boolean tableHasError = false; // Move this to the correct scope + + // // get data from DB to sync to server + // List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), + // obj.getVanColumnName()); + // logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), + // objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); + + // if (syncData != null && syncData.size() > 0) { + // int dataSize = syncData.size(); + // int startIndex = 0; + // int fullBatchCount = dataSize / BATCH_SIZE; + // int remainder = dataSize % BATCH_SIZE; + + // // Track table-level success/failure counts + // int totalRecords = dataSize; + // int successfulRecords = 0; + // int failedRecords = 0; + + // logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", + // obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); + + // // Process full batches + // for (int i = 0; i < fullBatchCount && !tableHasError; i++) { + // List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + // BATCH_SIZE); + // serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + // obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + // Authorization, token); + // logger.debug("Server acknowledgement for batch {}: {}", i, serverAcknowledgement); + + // // Parse the string response from syncDataToServer method + // if (serverAcknowledgement == null) { + // logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), + // obj.getTableName()); + // tableHasError = true; + // failedRecords += syncDataBatch.size(); + // groupHasFailures = true; + // break; + // } else if ("Data successfully synced".equals(serverAcknowledgement)) { + // successfulRecords += syncDataBatch.size(); + // } else if (serverAcknowledgement.startsWith("Partial success:")) { + // // Parse "Partial success: X records synced, Y failed" + // try { + // String[] parts = serverAcknowledgement.split(" "); + // int batchSuccess = Integer.parseInt(parts[2]); + // int batchFailed = Integer.parseInt(parts[5]); + // successfulRecords += batchSuccess; + // failedRecords += batchFailed; + // } catch (Exception e) { + // logger.warn("Could not parse partial success counts for batch {}: {}", i, + // serverAcknowledgement); + // // Assume half successful, half failed as fallback + // successfulRecords += syncDataBatch.size() / 2; + // failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); + // } + // } else if ("Sync failed".equals(serverAcknowledgement)) { + // failedRecords += syncDataBatch.size(); + // groupHasFailures = true; + // } + + // startIndex += BATCH_SIZE; + // } + + // // Process remainder batch if no error occurred + // if (!tableHasError && remainder > 0) { + // List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + // remainder); + // serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + // obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + // Authorization, token); + + // if (serverAcknowledgement == null) { + // logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), + // obj.getTableName()); + // failedRecords += syncDataBatch.size(); + // groupHasFailures = true; + // } else if ("Data successfully synced".equals(serverAcknowledgement)) { + // successfulRecords += 
syncDataBatch.size(); + // } else if (serverAcknowledgement.startsWith("Partial success:")) { + // try { + // String[] parts = serverAcknowledgement.split(" "); + // int batchSuccess = Integer.parseInt(parts[2]); + // int batchFailed = Integer.parseInt(parts[5]); + // successfulRecords += batchSuccess; + // failedRecords += batchFailed; + // } catch (Exception e) { + // logger.warn("Could not parse partial success counts for remainder: {}", + // serverAcknowledgement); + // successfulRecords += syncDataBatch.size() / 2; + // failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); + // } + // } else if ("Sync failed".equals(serverAcknowledgement)) { + // failedRecords += syncDataBatch.size(); + // groupHasFailures = true; + // } + // } + + // // Determine table status based on success/failure counts + // String tableStatus; + // if (successfulRecords == totalRecords && failedRecords == 0) { + // tableStatus = "success"; + // } else if (failedRecords == totalRecords && successfulRecords == 0) { + // tableStatus = "failed"; + // groupHasFailures = true; + // } else if (successfulRecords > 0 && failedRecords > 0) { + // tableStatus = "partial"; + // } else { + // tableStatus = "failed"; // Default to failed if unclear + // groupHasFailures = true; + // } + + // // Create detailed table info + // Map tableDetails = new HashMap<>(); + // tableDetails.put("tableName", obj.getTableName()); + // tableDetails.put("schemaName", obj.getSchemaName()); + // tableDetails.put("status", tableStatus); + // tableDetails.put("totalRecords", totalRecords); + // tableDetails.put("successfulRecords", successfulRecords); + // tableDetails.put("failedRecords", failedRecords); + // tableDetailsList.add(tableDetails); + + // logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {})", + // tableKey, tableStatus, successfulRecords, failedRecords, totalRecords); + + // } else { + // logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); + + // Map tableDetails = new HashMap<>(); + // tableDetails.put("tableName", obj.getTableName()); + // tableDetails.put("schemaName", obj.getSchemaName()); + // tableDetails.put("status", "no_data"); + // tableDetails.put("totalRecords", 0); + // tableDetails.put("successfulRecords", 0); + // tableDetails.put("failedRecords", 0); + // tableDetailsList.add(tableDetails); + // } + + // // If this table had critical failures, stop processing this group + // if (tableHasError) { + // hasSyncFailed = true; + // break; + // } + // } + +// Updated portion of startDataSync method - replace the batch processing section +for (SyncUtilityClass obj : syncUtilityClassList) { + String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); + boolean tableHasError = false; + + // get data from DB to sync to server + List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), + obj.getVanColumnName()); + logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); + + if (syncData != null && syncData.size() > 0) { + int dataSize = syncData.size(); + int startIndex = 0; + int fullBatchCount = dataSize / BATCH_SIZE; + int remainder = dataSize % BATCH_SIZE; + + // Track table-level success/failure counts + int totalRecords = dataSize; + int successfulRecords = 0; + int failedRecords = 0; + List tableFailureReasons = new ArrayList<>(); // Collect all failure reasons for this table + + logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", + obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); + + // Process full batches + for (int i = 0; i < fullBatchCount && !tableHasError; i++) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + BATCH_SIZE); + + // Updated to handle Map return type + Map syncResult = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + Authorization, token); + + if (syncResult == null) { + logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), + obj.getTableName()); + tableHasError = true; + failedRecords += syncDataBatch.size(); + groupHasFailures = true; + break; + } + + String status = (String) syncResult.get("status"); + int batchSuccessCount = (Integer) syncResult.get("successCount"); + int batchFailCount = (Integer) syncResult.get("failCount"); + @SuppressWarnings("unchecked") + List batchFailureReasons = (List) syncResult.get("failureReasons"); + + successfulRecords += batchSuccessCount; + failedRecords += batchFailCount; + + // Add batch failure reasons to table failure reasons + if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { + tableFailureReasons.addAll(batchFailureReasons); + groupHasFailures = true; + } + + if (status.equals("Sync failed")) { + tableHasError = true; + break; + } + + startIndex += BATCH_SIZE; + } + + // Process remainder batch if no error occurred + if (!tableHasError && remainder > 0) { + List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, + remainder); + + Map syncResult = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), + obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, + Authorization, token); + + if (syncResult == null) { + logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), + obj.getTableName()); + failedRecords += syncDataBatch.size(); + groupHasFailures = true; + } else { + String status = (String) syncResult.get("status"); + int batchSuccessCount = (Integer) syncResult.get("successCount"); + int batchFailCount = (Integer) syncResult.get("failCount"); + @SuppressWarnings("unchecked") + List batchFailureReasons = (List) syncResult.get("failureReasons"); + + successfulRecords += batchSuccessCount; + failedRecords += batchFailCount; + + // Add remainder failure reasons to table failure reasons + if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { + tableFailureReasons.addAll(batchFailureReasons); + groupHasFailures = true; + } + + if (status.equals("Sync failed")) { + 
groupHasFailures = true; + } + } + } + + // Determine table status based on success/failure counts + String tableStatus; + if (successfulRecords == totalRecords && failedRecords == 0) { + tableStatus = "success"; + } else if (failedRecords == totalRecords && successfulRecords == 0) { + tableStatus = "failed"; + groupHasFailures = true; + } else if (successfulRecords > 0 && failedRecords > 0) { + tableStatus = "partial"; + } else { + tableStatus = "failed"; // Default to failed if unclear + groupHasFailures = true; + } + + // Create detailed table info with failure reasons + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", tableStatus); + tableDetails.put("totalRecords", totalRecords); + tableDetails.put("successfulRecords", successfulRecords); + tableDetails.put("failedRecords", failedRecords); + + // Add failure reasons only if there are any failures + if (!tableFailureReasons.isEmpty()) { + tableDetails.put("failureReasons", tableFailureReasons); + } + + tableDetailsList.add(tableDetails); + + logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {}, Failure Reasons: {})", + tableKey, tableStatus, successfulRecords, failedRecords, totalRecords, + tableFailureReasons.isEmpty() ? "None" : tableFailureReasons); + + } else { + logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); + + Map tableDetails = new HashMap<>(); + tableDetails.put("tableName", obj.getTableName()); + tableDetails.put("schemaName", obj.getSchemaName()); + tableDetails.put("status", "no_data"); + tableDetails.put("totalRecords", 0); + tableDetails.put("successfulRecords", 0); + tableDetails.put("failedRecords", 0); + tableDetailsList.add(tableDetails); + } + + // If this table had critical failures, stop processing this group + if (tableHasError) { + hasSyncFailed = true; + break; + } +} // Determine overall group status String groupStatus; long successTables = tableDetailsList.stream() @@ -311,8 +468,8 @@ private String startDataSync(int vanID, String user, String Authorization, Strin // Create group response Map groupResponse = new HashMap<>(); - groupResponse.put("groupId", groupId); - groupResponse.put("groupName", groupName != null ? groupName : "Group " + groupId); + groupResponse.put("syncTableGroupID", groupId); + groupResponse.put("syncTableGroupName", groupName != null ? groupName : "Group " + groupId); groupResponse.put("status", groupStatus); groupResponse.put("tables", tableDetailsList); groupResponse.put("summary", Map.of( @@ -328,8 +485,8 @@ private String startDataSync(int vanID, String user, String Authorization, Strin for (int j = dataSyncGroupList.indexOf(dataSyncGroups) + 1; j < dataSyncGroupList.size(); j++) { DataSyncGroups remainingGroup = dataSyncGroupList.get(j); Map pendingGroupResponse = new HashMap<>(); - pendingGroupResponse.put("groupId", remainingGroup.getSyncTableGroupID()); - pendingGroupResponse.put("groupName", + pendingGroupResponse.put("syncTableGroupID", remainingGroup.getSyncTableGroupID()); + pendingGroupResponse.put("syncTableGroupName", remainingGroup.getSyncTableGroupName() != null ? 
remainingGroup.getSyncTableGroupName() : "Group " + remainingGroup.getSyncTableGroupID()); pendingGroupResponse.put("status", "pending"); @@ -351,7 +508,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin finalResponse.put("response", "Data sync failed"); finalResponse.put("groupsProgress", responseStatus); return objectMapper.writerWithDefaultPrettyPrinter() - .writeValueAsString(Collections.singletonMap("data", finalResponse)); + .writeValueAsString(finalResponse); } else { // Check if there was any data to sync boolean hasData = responseStatus.stream() @@ -368,7 +525,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin finalResponse.put("response", "Data sync completed"); finalResponse.put("groupsProgress", responseStatus); return objectMapper.writerWithDefaultPrettyPrinter() - .writeValueAsString(Collections.singletonMap("data", finalResponse)); + .writeValueAsString(finalResponse); } } } @@ -411,100 +568,216 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, - String token) throws Exception { - - RestTemplate restTemplate = new RestTemplate(); - Integer facilityID = masterVanRepo.getFacilityID(vanID); - - // serialize null - GsonBuilder gsonBuilder = new GsonBuilder(); - gsonBuilder.serializeNulls(); - Gson gson = gsonBuilder.create(); - - Map dataMap = new HashMap<>(); - dataMap.put("schemaName", schemaName); - dataMap.put("tableName", tableName); - dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); - dataMap.put("serverColumns", serverColumns); - dataMap.put("syncData", dataToBesync); - dataMap.put("syncedBy", user); - if (facilityID != null) - dataMap.put("facilityID", facilityID); - - String requestOBJ = gson.toJson(dataMap); - logger.info("Request obj=" + requestOBJ); - HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); - ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, - String.class); - logger.info("Response for the server=" + response); - logger.info("Response body=" + response.getBody()); - - int successCount = 0; - int failCount = 0; - List successVanSerialNos = new ArrayList<>(); - List failedVanSerialNos = new ArrayList<>(); - - if (response != null && response.hasBody()) { - JSONObject obj = new JSONObject(response.getBody()); - if (obj.has("data")) { - JSONObject dataObj = obj.getJSONObject("data"); - if (dataObj.has("records")) { - JSONArray recordsArr = dataObj.getJSONArray("records"); - for (int i = 0; i < recordsArr.length(); i++) { - JSONObject record = recordsArr.getJSONObject(i); - String vanSerialNo = record.getString("vanSerialNo"); - boolean success = record.getBoolean("success"); - if (success) { - successVanSerialNos.add(vanSerialNo); - successCount++; - } else { - failedVanSerialNos.add(vanSerialNo); - failCount++; - } - } - } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - // Handle summary response for m_beneficiaryregidmapping - String respMsg = dataObj.optString("response", ""); - int statusCode = obj.optInt("statusCode", 0); - if (respMsg.toLowerCase().contains("success") && statusCode == 200) { - // All records are successful - for (Map map : dataToBesync) { - successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - } - successCount = successVanSerialNos.size(); + // public String syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, + // String serverColumns, 
List> dataToBesync, String user, String Authorization, + // String token) throws Exception { + + // RestTemplate restTemplate = new RestTemplate(); + // Integer facilityID = masterVanRepo.getFacilityID(vanID); + + // // serialize null + // GsonBuilder gsonBuilder = new GsonBuilder(); + // gsonBuilder.serializeNulls(); + // Gson gson = gsonBuilder.create(); + + // Map dataMap = new HashMap<>(); + // dataMap.put("schemaName", schemaName); + // dataMap.put("tableName", tableName); + // dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); + // dataMap.put("serverColumns", serverColumns); + // dataMap.put("syncData", dataToBesync); + // dataMap.put("syncedBy", user); + // if (facilityID != null) + // dataMap.put("facilityID", facilityID); + + // String requestOBJ = gson.toJson(dataMap); + // logger.info("Request obj=" + requestOBJ); + // HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); + // ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, + // String.class); + // logger.info("Response for the server=" + response); + // logger.info("Response body=" + response.getBody()); + + // int successCount = 0; + // int failCount = 0; + // List successVanSerialNos = new ArrayList<>(); + // List failedVanSerialNos = new ArrayList<>(); + + // if (response != null && response.hasBody()) { + // JSONObject obj = new JSONObject(response.getBody()); + // if (obj.has("data")) { + // JSONObject dataObj = obj.getJSONObject("data"); + // if (dataObj.has("records")) { + // JSONArray recordsArr = dataObj.getJSONArray("records"); + // for (int i = 0; i < recordsArr.length(); i++) { + // JSONObject record = recordsArr.getJSONObject(i); + // String vanSerialNo = record.getString("vanSerialNo"); + // boolean success = record.getBoolean("success"); + // if (success) { + // successVanSerialNos.add(vanSerialNo); + // successCount++; + // } else { + // failedVanSerialNos.add(vanSerialNo); + // failCount++; + // } + // } + // } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { + // // Handle summary response for m_beneficiaryregidmapping + // String respMsg = dataObj.optString("response", ""); + // int statusCode = obj.optInt("statusCode", 0); + // if (respMsg.toLowerCase().contains("success") && statusCode == 200) { + // // All records are successful + // for (Map map : dataToBesync) { + // successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + // } + // successCount = successVanSerialNos.size(); + // } else { + // // All records failed + // for (Map map : dataToBesync) { + // failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + // } + // failCount = failedVanSerialNos.size(); + // } + // } + // } + // } + + // logger.info("Success Van Serial No=" + successVanSerialNos.toString()); + // logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); + + // // Update processed flag for success and failed vanSerialNos + // if (!successVanSerialNos.isEmpty()) { + // dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, + // vanAutoIncColumnName, user, "P"); + // } + // if (!failedVanSerialNos.isEmpty()) { + // dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, + // vanAutoIncColumnName, user, "F"); + // } + + // if (successCount > 0 && failCount == 0) + // return "Data successfully synced"; + // else if (successCount > 0 && failCount > 0) + // return "Partial success: " + successCount + " records 
synced, " + failCount + " failed"; + // else + // return "Sync failed"; + // } + +/** + * Updated syncDataToServer method that captures failure reasons + */ +public Map syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, + String serverColumns, List> dataToBesync, String user, String Authorization, + String token) throws Exception { + + RestTemplate restTemplate = new RestTemplate(); + Integer facilityID = masterVanRepo.getFacilityID(vanID); + + // serialize null + GsonBuilder gsonBuilder = new GsonBuilder(); + gsonBuilder.serializeNulls(); + Gson gson = gsonBuilder.create(); + + Map dataMap = new HashMap<>(); + dataMap.put("schemaName", schemaName); + dataMap.put("tableName", tableName); + dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); + dataMap.put("serverColumns", serverColumns); + dataMap.put("syncData", dataToBesync); + dataMap.put("syncedBy", user); + if (facilityID != null) + dataMap.put("facilityID", facilityID); + + String requestOBJ = gson.toJson(dataMap); + logger.info("Request obj=" + requestOBJ); + HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); + ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, + String.class); + logger.info("Response for the server=" + response); + logger.info("Response body=" + response.getBody()); + + int successCount = 0; + int failCount = 0; + List successVanSerialNos = new ArrayList<>(); + List failedVanSerialNos = new ArrayList<>(); + List failureReasons = new ArrayList<>(); // New: collect failure reasons + + if (response != null && response.hasBody()) { + JSONObject obj = new JSONObject(response.getBody()); + if (obj.has("data")) { + JSONObject dataObj = obj.getJSONObject("data"); + if (dataObj.has("records")) { + JSONArray recordsArr = dataObj.getJSONArray("records"); + for (int i = 0; i < recordsArr.length(); i++) { + JSONObject record = recordsArr.getJSONObject(i); + String vanSerialNo = record.getString("vanSerialNo"); + boolean success = record.getBoolean("success"); + if (success) { + successVanSerialNos.add(vanSerialNo); + successCount++; } else { - // All records failed - for (Map map : dataToBesync) { - failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - } - failCount = failedVanSerialNos.size(); + failedVanSerialNos.add(vanSerialNo); + failCount++; + // Capture the failure reason + String reason = record.optString("reason", "Unknown error"); + failureReasons.add(reason); + } + } + } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { + // Handle summary response for m_beneficiaryregidmapping + String respMsg = dataObj.optString("response", ""); + int statusCode = obj.optInt("statusCode", 0); + if (respMsg.toLowerCase().contains("success") && statusCode == 200) { + // All records are successful + for (Map map : dataToBesync) { + successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + successCount = successVanSerialNos.size(); + } else { + // All records failed + for (Map map : dataToBesync) { + failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); + } + failCount = failedVanSerialNos.size(); + // Add a generic reason for all failed records + for (int i = 0; i < failCount; i++) { + failureReasons.add(respMsg.isEmpty() ? 
"Sync failed" : respMsg); } } } } + } - logger.info("Success Van Serial No=" + successVanSerialNos.toString()); - logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); + logger.info("Success Van Serial No=" + successVanSerialNos.toString()); + logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); + logger.info("Failure Reasons=" + failureReasons.toString()); - // Update processed flag for success and failed vanSerialNos - if (!successVanSerialNos.isEmpty()) { - dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, - vanAutoIncColumnName, user, "P"); - } - if (!failedVanSerialNos.isEmpty()) { - dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, - vanAutoIncColumnName, user, "F"); - } + // Update processed flag for success and failed vanSerialNos + if (!successVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, + vanAutoIncColumnName, user, "P","Null"); + } + if (!failedVanSerialNos.isEmpty()) { + dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, + vanAutoIncColumnName, user, "F",failureReasons.get(0)); + } - if (successCount > 0 && failCount == 0) - return "Data successfully synced"; - else if (successCount > 0 && failCount > 0) - return "Partial success: " + successCount + " records synced, " + failCount + " failed"; - else - return "Sync failed"; + // Return detailed result object instead of just a string + Map result = new HashMap<>(); + if (successCount > 0 && failCount == 0) { + result.put("status", "Data successfully synced"); + } else if (successCount > 0 && failCount > 0) { + result.put("status", "Partial success: " + successCount + " records synced, " + failCount + " failed"); + } else { + result.put("status", "Sync failed"); } + + result.put("successCount", successCount); + result.put("failCount", failCount); + result.put("failureReasons", failureReasons); + + return result; +} public StringBuilder getVanSerialNoListForSyncedData(String vanAutoIncColumnName, List> dataToBesync) throws Exception { diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 0ee9f25d..41be9620 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -785,7 +785,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, shortReason)); + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); } } } @@ -829,7 +829,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, shortReason)); + syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); } } } From 58cfc27753eb7c168ae03c0b45995a66b8e71d98 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 19:49:27 +0530 Subject: [PATCH 40/45] fix: remove 
reason and add dataaccess exception --- .../service/dataSyncActivity/UploadDataToServerImpl.java | 1 - .../GetDataFromVanAndSyncToDBImpl.java | 9 +++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index 8341b94c..d7b3ba1f 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -774,7 +774,6 @@ public Map syncDataToServer(int vanID, String schemaName, String result.put("successCount", successCount); result.put("failCount", failCount); - result.put("failureReasons", failureReasons); return result; } diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 41be9620..f1b51c7c 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -31,6 +31,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DataAccessException; import org.springframework.stereotype.Service; import com.fasterxml.jackson.databind.ObjectMapper; @@ -772,7 +773,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } - } catch (Exception e) { + } catch (DataAccessException e) { insertSuccess = false; logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); @@ -785,7 +786,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + syncUploadDataDigester.getSyncedBy(), false, e.getCause().getMessage())); } } } @@ -816,7 +817,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } - } catch (Exception e) { + } catch (DataAccessException e) { updateSuccess = false; logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); @@ -829,7 +830,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, e.getMessage())); + syncUploadDataDigester.getSyncedBy(), false, e.getCause().getMessage())); } } } From cfc6e735321227985e24f202f680ffd4d2fda480 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 20:37:25 +0530 Subject: [PATCH 41/45] fix: revert the server exception --- .../GetDataFromVanAndSyncToDBImpl.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index f1b51c7c..0ee9f25d 100644 --- 
a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -31,7 +31,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.dao.DataAccessException; import org.springframework.stereotype.Service; import com.fasterxml.jackson.databind.ObjectMapper; @@ -773,7 +772,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } - } catch (DataAccessException e) { + } catch (Exception e) { insertSuccess = false; logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); @@ -786,7 +785,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, e.getCause().getMessage())); + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } } } @@ -817,7 +816,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig } } - } catch (DataAccessException e) { + } catch (Exception e) { updateSuccess = false; logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); @@ -830,7 +829,7 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, e.getCause().getMessage())); + syncUploadDataDigester.getSyncedBy(), false, shortReason)); } } } From d814b5c76647bfdf85f85ac0cce2a34e2171edb2 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 20:51:27 +0530 Subject: [PATCH 42/45] fix: remove unwanted code --- .../UploadDataToServerImpl.java | 268 +----------------- .../GetDataFromVanAndSyncToDBImpl.java | 17 +- 2 files changed, 12 insertions(+), 273 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java index d7b3ba1f..266d3d28 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncActivity/UploadDataToServerImpl.java @@ -142,155 +142,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin List> tableDetailsList = new ArrayList<>(); boolean groupHasFailures = false; - // for (SyncUtilityClass obj : syncUtilityClassList) { - // String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); - // boolean tableHasError = false; // Move this to the correct scope - - // // get data from DB to sync to server - // List> syncData = getDataToSync(obj.getSchemaName(), obj.getTableName(), - // obj.getVanColumnName()); - // logger.debug("Fetched syncData for schema {} and table {}: {}", obj.getSchemaName(), obj.getTableName(), - // objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(syncData)); - - // if (syncData != null && syncData.size() > 0) { - // int dataSize = syncData.size(); - // int startIndex = 0; - // int fullBatchCount = dataSize / BATCH_SIZE; - // int remainder = dataSize % BATCH_SIZE; - - // // Track table-level success/failure counts - // int totalRecords = dataSize; - // int successfulRecords = 0; - // int failedRecords = 0; - - // logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", - // obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); - - // // Process full batches - // for (int i = 0; i < fullBatchCount && !tableHasError; i++) { - // List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - // BATCH_SIZE); - // serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - // obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - // Authorization, token); - // logger.debug("Server acknowledgement for batch {}: {}", i, serverAcknowledgement); - - // // Parse the string response from syncDataToServer method - // if (serverAcknowledgement == null) { - // logger.error("Sync failed for batch {} in schema: {}, table: {}", i, obj.getSchemaName(), - // obj.getTableName()); - // tableHasError = true; - // failedRecords += syncDataBatch.size(); - // groupHasFailures = true; - // break; - // } else if ("Data successfully synced".equals(serverAcknowledgement)) { - // successfulRecords += syncDataBatch.size(); - // } else if (serverAcknowledgement.startsWith("Partial success:")) { - // // Parse "Partial success: X records synced, Y failed" - // try { - // String[] parts = serverAcknowledgement.split(" "); - // int batchSuccess = Integer.parseInt(parts[2]); - // int batchFailed = Integer.parseInt(parts[5]); - // successfulRecords += batchSuccess; - // failedRecords += batchFailed; - // } catch (Exception e) { - // logger.warn("Could not parse partial success counts for batch {}: {}", i, - // serverAcknowledgement); - // // Assume half successful, half failed as fallback - // successfulRecords += syncDataBatch.size() / 2; - // failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); - // } - // } else if ("Sync failed".equals(serverAcknowledgement)) { - // failedRecords += syncDataBatch.size(); - // groupHasFailures = true; - // } - - // startIndex += BATCH_SIZE; - // } - - // // Process remainder batch if no error occurred - // if (!tableHasError && remainder > 0) { - // List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, - // remainder); - // serverAcknowledgement = syncDataToServer(vanID, obj.getSchemaName(), obj.getTableName(), - // obj.getVanAutoIncColumnName(), obj.getServerColumnName(), syncDataBatch, user, - // Authorization, token); - - // if (serverAcknowledgement == null) { - // logger.error("Sync failed for remaining data in schema: {}, table: {}", obj.getSchemaName(), - // obj.getTableName()); - // failedRecords += syncDataBatch.size(); - // groupHasFailures = true; - // } else if ("Data successfully synced".equals(serverAcknowledgement)) { - // successfulRecords += 
syncDataBatch.size(); - // } else if (serverAcknowledgement.startsWith("Partial success:")) { - // try { - // String[] parts = serverAcknowledgement.split(" "); - // int batchSuccess = Integer.parseInt(parts[2]); - // int batchFailed = Integer.parseInt(parts[5]); - // successfulRecords += batchSuccess; - // failedRecords += batchFailed; - // } catch (Exception e) { - // logger.warn("Could not parse partial success counts for remainder: {}", - // serverAcknowledgement); - // successfulRecords += syncDataBatch.size() / 2; - // failedRecords += syncDataBatch.size() - (syncDataBatch.size() / 2); - // } - // } else if ("Sync failed".equals(serverAcknowledgement)) { - // failedRecords += syncDataBatch.size(); - // groupHasFailures = true; - // } - // } - - // // Determine table status based on success/failure counts - // String tableStatus; - // if (successfulRecords == totalRecords && failedRecords == 0) { - // tableStatus = "success"; - // } else if (failedRecords == totalRecords && successfulRecords == 0) { - // tableStatus = "failed"; - // groupHasFailures = true; - // } else if (successfulRecords > 0 && failedRecords > 0) { - // tableStatus = "partial"; - // } else { - // tableStatus = "failed"; // Default to failed if unclear - // groupHasFailures = true; - // } - - // // Create detailed table info - // Map tableDetails = new HashMap<>(); - // tableDetails.put("tableName", obj.getTableName()); - // tableDetails.put("schemaName", obj.getSchemaName()); - // tableDetails.put("status", tableStatus); - // tableDetails.put("totalRecords", totalRecords); - // tableDetails.put("successfulRecords", successfulRecords); - // tableDetails.put("failedRecords", failedRecords); - // tableDetailsList.add(tableDetails); - - // logger.info("Table sync summary - {}: {} (Success: {}, Failed: {}, Total: {})", - // tableKey, tableStatus, successfulRecords, failedRecords, totalRecords); - - // } else { - // logger.info("No data to sync for schema {} and table {}", obj.getSchemaName(), obj.getTableName()); - - // Map tableDetails = new HashMap<>(); - // tableDetails.put("tableName", obj.getTableName()); - // tableDetails.put("schemaName", obj.getSchemaName()); - // tableDetails.put("status", "no_data"); - // tableDetails.put("totalRecords", 0); - // tableDetails.put("successfulRecords", 0); - // tableDetails.put("failedRecords", 0); - // tableDetailsList.add(tableDetails); - // } - - // // If this table had critical failures, stop processing this group - // if (tableHasError) { - // hasSyncFailed = true; - // break; - // } - // } - -// Updated portion of startDataSync method - replace the batch processing section -for (SyncUtilityClass obj : syncUtilityClassList) { + for (SyncUtilityClass obj : syncUtilityClassList) { String tableKey = obj.getSchemaName() + "." 
+ obj.getTableName(); boolean tableHasError = false; @@ -310,7 +162,7 @@ private String startDataSync(int vanID, String user, String Authorization, Strin int totalRecords = dataSize; int successfulRecords = 0; int failedRecords = 0; - List tableFailureReasons = new ArrayList<>(); // Collect all failure reasons for this table + List tableFailureReasons = new ArrayList<>(); logger.info("Starting batch sync for schema: {}, table: {} with {} full batches and {} remainder", obj.getSchemaName(), obj.getTableName(), fullBatchCount, remainder); @@ -343,7 +195,6 @@ private String startDataSync(int vanID, String user, String Authorization, Strin successfulRecords += batchSuccessCount; failedRecords += batchFailCount; - // Add batch failure reasons to table failure reasons if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { tableFailureReasons.addAll(batchFailureReasons); groupHasFailures = true; @@ -357,7 +208,6 @@ private String startDataSync(int vanID, String user, String Authorization, Strin startIndex += BATCH_SIZE; } - // Process remainder batch if no error occurred if (!tableHasError && remainder > 0) { List> syncDataBatch = getBatchOfAskedSizeDataToSync(syncData, startIndex, remainder); @@ -381,7 +231,6 @@ private String startDataSync(int vanID, String user, String Authorization, Strin successfulRecords += batchSuccessCount; failedRecords += batchFailCount; - // Add remainder failure reasons to table failure reasons if (batchFailureReasons != null && !batchFailureReasons.isEmpty()) { tableFailureReasons.addAll(batchFailureReasons); groupHasFailures = true; @@ -565,108 +414,7 @@ private List> getBatchOfAskedSizeDataToSync(List> dataToBesync, String user, String Authorization, - // String token) throws Exception { - - // RestTemplate restTemplate = new RestTemplate(); - // Integer facilityID = masterVanRepo.getFacilityID(vanID); - - // // serialize null - // GsonBuilder gsonBuilder = new GsonBuilder(); - // gsonBuilder.serializeNulls(); - // Gson gson = gsonBuilder.create(); - - // Map dataMap = new HashMap<>(); - // dataMap.put("schemaName", schemaName); - // dataMap.put("tableName", tableName); - // dataMap.put("vanAutoIncColumnName", vanAutoIncColumnName); - // dataMap.put("serverColumns", serverColumns); - // dataMap.put("syncData", dataToBesync); - // dataMap.put("syncedBy", user); - // if (facilityID != null) - // dataMap.put("facilityID", facilityID); - - // String requestOBJ = gson.toJson(dataMap); - // logger.info("Request obj=" + requestOBJ); - // HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); - // ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, - // String.class); - // logger.info("Response for the server=" + response); - // logger.info("Response body=" + response.getBody()); - - // int successCount = 0; - // int failCount = 0; - // List successVanSerialNos = new ArrayList<>(); - // List failedVanSerialNos = new ArrayList<>(); - - // if (response != null && response.hasBody()) { - // JSONObject obj = new JSONObject(response.getBody()); - // if (obj.has("data")) { - // JSONObject dataObj = obj.getJSONObject("data"); - // if (dataObj.has("records")) { - // JSONArray recordsArr = dataObj.getJSONArray("records"); - // for (int i = 0; i < recordsArr.length(); i++) { - // JSONObject record = recordsArr.getJSONObject(i); - // String vanSerialNo = record.getString("vanSerialNo"); - // boolean success = record.getBoolean("success"); - // if (success) { - // 
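For readability, the per-record acknowledgement handling that syncDataToServer keeps (and that the commented-out block above also performed) can be summarised as the following self-contained sketch. It assumes the org.json types already used in this class; the "reason" field name on each record is an illustrative assumption, since the surrounding hunks only show the vanSerialNo and success fields being read.

import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

class SyncAckParserSketch {
    // Classifies each record of the central server's reply into success /
    // failure lists, collecting a failure reason where one is reported.
    static void parse(String responseBody, List<String> successVanSerialNos,
            List<String> failedVanSerialNos, List<String> failureReasons) {
        JSONObject obj = new JSONObject(responseBody);
        if (!obj.has("data")) {
            return; // nothing to classify; the caller treats the batch as failed
        }
        JSONObject dataObj = obj.getJSONObject("data");
        if (!dataObj.has("records")) {
            return; // summary-style responses are handled separately
        }
        JSONArray recordsArr = dataObj.getJSONArray("records");
        for (int i = 0; i < recordsArr.length(); i++) {
            JSONObject record = recordsArr.getJSONObject(i);
            String vanSerialNo = record.getString("vanSerialNo");
            if (record.getBoolean("success")) {
                successVanSerialNos.add(vanSerialNo);
            } else {
                failedVanSerialNos.add(vanSerialNo);
                // "reason" is assumed for illustration; the server-side
                // SyncResult in this patch series carries a per-record message.
                failureReasons.add(record.optString("reason", "Sync failed"));
            }
        }
    }
}

In the retained method these lists then drive updateProcessedFlagInVan with "P" for the successful vanSerialNos and "F" plus the first failure reason for the failed ones.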
successVanSerialNos.add(vanSerialNo); - // successCount++; - // } else { - // failedVanSerialNos.add(vanSerialNo); - // failCount++; - // } - // } - // } else if (tableName.equalsIgnoreCase("m_beneficiaryregidmapping")) { - // // Handle summary response for m_beneficiaryregidmapping - // String respMsg = dataObj.optString("response", ""); - // int statusCode = obj.optInt("statusCode", 0); - // if (respMsg.toLowerCase().contains("success") && statusCode == 200) { - // // All records are successful - // for (Map map : dataToBesync) { - // successVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - // } - // successCount = successVanSerialNos.size(); - // } else { - // // All records failed - // for (Map map : dataToBesync) { - // failedVanSerialNos.add(String.valueOf(map.get(vanAutoIncColumnName))); - // } - // failCount = failedVanSerialNos.size(); - // } - // } - // } - // } - - // logger.info("Success Van Serial No=" + successVanSerialNos.toString()); - // logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); - - // // Update processed flag for success and failed vanSerialNos - // if (!successVanSerialNos.isEmpty()) { - // dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, successVanSerialNos, - // vanAutoIncColumnName, user, "P"); - // } - // if (!failedVanSerialNos.isEmpty()) { - // dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, failedVanSerialNos, - // vanAutoIncColumnName, user, "F"); - // } - - // if (successCount > 0 && failCount == 0) - // return "Data successfully synced"; - // else if (successCount > 0 && failCount > 0) - // return "Partial success: " + successCount + " records synced, " + failCount + " failed"; - // else - // return "Sync failed"; - // } - -/** - * Updated syncDataToServer method that captures failure reasons - */ -public Map syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, + public Map syncDataToServer(int vanID, String schemaName, String tableName, String vanAutoIncColumnName, String serverColumns, List> dataToBesync, String user, String Authorization, String token) throws Exception { @@ -689,18 +437,17 @@ public Map syncDataToServer(int vanID, String schemaName, String dataMap.put("facilityID", facilityID); String requestOBJ = gson.toJson(dataMap); - logger.info("Request obj=" + requestOBJ); HttpEntity request = RestTemplateUtil.createRequestEntity(requestOBJ, Authorization, "datasync"); ResponseEntity response = restTemplate.exchange(dataSyncUploadUrl, HttpMethod.POST, request, String.class); - logger.info("Response for the server=" + response); - logger.info("Response body=" + response.getBody()); + + logger.info("Response from the server=" + response); int successCount = 0; int failCount = 0; List successVanSerialNos = new ArrayList<>(); List failedVanSerialNos = new ArrayList<>(); - List failureReasons = new ArrayList<>(); // New: collect failure reasons + List failureReasons = new ArrayList<>(); if (response != null && response.hasBody()) { JSONObject obj = new JSONObject(response.getBody()); @@ -750,8 +497,7 @@ public Map syncDataToServer(int vanID, String schemaName, String logger.info("Success Van Serial No=" + successVanSerialNos.toString()); logger.info("Failed Van Serial No=" + failedVanSerialNos.toString()); - logger.info("Failure Reasons=" + failureReasons.toString()); - + // Update processed flag for success and failed vanSerialNos if (!successVanSerialNos.isEmpty()) { dataSyncRepository.updateProcessedFlagInVan(schemaName, tableName, 
successVanSerialNos, diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 0ee9f25d..24be4acf 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -166,19 +166,12 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } } - // if (syncSuccess) { - // return "Overall data sync passed."; - // } else { - // return "Overall data sync failed. Details: " + errorMessage; - // } Map responseMap = new HashMap<>(); - responseMap.put("statusCode", 200); - responseMap.put("message", "Data sync completed"); - responseMap.put("records", syncResults); - logger.info("Response = "+responseMap); - logger.info("Sync Results = "+syncResults); - return new ObjectMapper().writeValueAsString(responseMap); - + responseMap.put("statusCode", 200); + responseMap.put("message", "Data sync completed"); + responseMap.put("records", syncResults); + + return new ObjectMapper().writeValueAsString(responseMap); } } From 5fde263d38991015e7bd7d94b1cd64609c45eb80 Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 20:57:12 +0530 Subject: [PATCH 43/45] fix: exception message --- .../GetDataFromVanAndSyncToDBImpl.java | 1766 +++++++++-------- 1 file changed, 934 insertions(+), 832 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 24be4acf..56704645 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -1,868 +1,971 @@ - /* - * AMRIT – Accessible Medical Records via Integrated Technology - * Integrated EHR (Electronic Health Records) Solution - * - * Copyright (C) "Piramal Swasthya Management and Research Institute" - * - * This file is part of AMRIT. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see https://www.gnu.org/licenses/. 
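On the central side, PATCH 42 above leaves syncDataToServer returning a plain JSON envelope (statusCode, message, records) built from the accumulated SyncResult list. A minimal sketch of that shape follows, assuming Jackson's ObjectMapper as used in the class; the record field names simply mirror the SyncResult constructor arguments seen in this file (schemaName, tableName, vanSerialNo, syncedBy, success, reason), and the real SyncResult class may differ.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

class SyncResponseEnvelopeSketch {
    // Illustrative stand-in for com.iemr.mmu.service.dataSyncActivity.SyncResult
    static class RecordOutcome {
        public String schemaName;
        public String tableName;
        public String vanSerialNo;
        public String syncedBy;
        public boolean success;
        public String reason;

        RecordOutcome(String schemaName, String tableName, String vanSerialNo,
                String syncedBy, boolean success, String reason) {
            this.schemaName = schemaName;
            this.tableName = tableName;
            this.vanSerialNo = vanSerialNo;
            this.syncedBy = syncedBy;
            this.success = success;
            this.reason = reason;
        }
    }

    // Builds the envelope exactly as the central endpoint does: a fixed
    // statusCode and message plus one entry per record outcome.
    static String buildResponse(List<RecordOutcome> records) throws Exception {
        Map<String, Object> responseMap = new HashMap<>();
        responseMap.put("statusCode", 200);
        responseMap.put("message", "Data sync completed");
        responseMap.put("records", records);
        return new ObjectMapper().writeValueAsString(responseMap);
    }
}

The van side in UploadDataToServerImpl reads these per-record outcomes to decide which vanSerialNos are flagged "P" and which "F".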
- */ - package com.iemr.mmu.service.dataSyncLayerCentral; - - import java.time.LocalDateTime; - import java.util.ArrayList; - import java.util.Arrays; - import java.util.HashMap; - import java.util.List; - import java.util.Map; - - import org.slf4j.Logger; - import org.slf4j.LoggerFactory; - import org.springframework.beans.factory.annotation.Autowired; - import org.springframework.stereotype.Service; - - import com.fasterxml.jackson.databind.ObjectMapper; - import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; - import com.iemr.mmu.service.dataSyncActivity.SyncResult; - - @Service - public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { - - private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity - private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); - - @Autowired - private DataSyncRepositoryCentral dataSyncRepositoryCentral; - - private static final Map> TABLE_GROUPS = new HashMap<>(); - static { - TABLE_GROUPS.put(1, - Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", - "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", - "i_beneficiaryidentity", "i_beneficiarymapping")); - - TABLE_GROUPS.put(2, - Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", - "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", - "t_physicalactivity")); - - TABLE_GROUPS.put(3, - Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", - "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", - "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); - - TABLE_GROUPS.put(4, - Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", - "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", - "t_benreferdetails")); - - TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", "t_physicalstockentry", "t_patientissue", - "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); - - TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", - "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", - "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", - "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); - - TABLE_GROUPS.put(7, - Arrays.asList("t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", - "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", - "t_canceroralexamination", "t_cancerbreastexamination", "t_cancerabdominalexamination", - "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); - - TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); - - TABLE_GROUPS.put(9, - Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", - "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentissue", "t_indentorder", "t_saitemmapping")); +/* +* AMRIT – Accessible Medical Records via Integrated Technology +* Integrated EHR (Electronic Health Records) Solution +* +* Copyright (C) "Piramal Swasthya Management and Research Institute" +* +* This file 
is part of AMRIT. +* +* This program is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program. If not, see https://www.gnu.org/licenses/. +*/ +package com.iemr.mmu.service.dataSyncLayerCentral; + +import java.sql.SQLException; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.iemr.mmu.data.syncActivity_syncLayer.SyncUploadDataDigester; +import com.iemr.mmu.service.dataSyncActivity.SyncResult; + +@Service +public class GetDataFromVanAndSyncToDBImpl implements GetDataFromVanAndSyncToDB { + + private static final String SERVER_COLUMNS_NOT_REQUIRED = null; // Renamed for clarity + private static final Logger logger = LoggerFactory.getLogger(GetDataFromVanAndSyncToDBImpl.class); + + @Autowired + private DataSyncRepositoryCentral dataSyncRepositoryCentral; + + private static final Map> TABLE_GROUPS = new HashMap<>(); + static { + TABLE_GROUPS.put(1, + Arrays.asList("m_beneficiaryregidmapping", "i_beneficiaryaccount", "i_beneficiaryaddress", + "i_beneficiarycontacts", "i_beneficiarydetails", "i_beneficiaryfamilymapping", + "i_beneficiaryidentity", "i_beneficiarymapping")); + + TABLE_GROUPS.put(2, + Arrays.asList("t_benvisitdetail", "t_phy_anthropometry", "t_phy_vitals", "t_benadherence", "t_anccare", + "t_pnccare", "t_ncdscreening", "t_ncdcare", "i_ben_flow_outreach", "t_covid19", "t_idrsdetails", + "t_physicalactivity")); + + TABLE_GROUPS.put(3, + Arrays.asList("t_phy_generalexam", "t_phy_headtotoe", "t_sys_obstetric", "t_sys_gastrointestinal", + "t_sys_cardiovascular", "t_sys_respiratory", "t_sys_centralnervous", + "t_sys_musculoskeletalsystem", "t_sys_genitourinarysystem")); + + TABLE_GROUPS.put(4, + Arrays.asList("t_ancdiagnosis", "t_ncddiagnosis", "t_pncdiagnosis", "t_benchefcomplaint", + "t_benclinicalobservation", "t_prescription", "t_prescribeddrug", "t_lab_testorder", + "t_benreferdetails")); + + TABLE_GROUPS.put(5, Arrays.asList("t_lab_testresult", "t_physicalstockentry", "t_patientissue", + "t_facilityconsumption", "t_itemstockentry", "t_itemstockexit")); + + TABLE_GROUPS.put(6, Arrays.asList("t_benmedhistory", "t_femaleobstetrichistory", "t_benmenstrualdetails", + "t_benpersonalhabit", "t_childvaccinedetail1", "t_childvaccinedetail2", "t_childoptionalvaccinedetail", + "t_ancwomenvaccinedetail", "t_childfeedinghistory", "t_benallergyhistory", "t_bencomorbiditycondition", + "t_benmedicationhistory", "t_benfamilyhistory", "t_perinatalhistory", "t_developmenthistory")); + + TABLE_GROUPS.put(7, + Arrays.asList("t_cancerfamilyhistory", "t_cancerpersonalhistory", "t_cancerdiethistory", + "t_cancerobstetrichistory", "t_cancervitals", "t_cancersignandsymptoms", "t_cancerlymphnode", + "t_canceroralexamination", 
"t_cancerbreastexamination", "t_cancerabdominalexamination", + "t_cancergynecologicalexamination", "t_cancerdiagnosis", "t_cancerimageannotation")); + + TABLE_GROUPS.put(8, Arrays.asList("i_beneficiaryimage")); + + TABLE_GROUPS.put(9, + Arrays.asList("t_itemstockentry", "t_itemstockexit", "t_patientissue", "t_physicalstockentry", + "t_stockadjustment", "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentissue", "t_indentorder", "t_saitemmapping")); - } + } - public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { + public String syncDataToServer(String requestOBJ, String Authorization) throws Exception { - ObjectMapper mapper = new ObjectMapper(); - SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncResults = new ArrayList<>(); // <-- define here + ObjectMapper mapper = new ObjectMapper(); + SyncUploadDataDigester syncUploadDataDigester = mapper.readValue(requestOBJ, SyncUploadDataDigester.class); + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncResults = new ArrayList<>(); // <-- define here - logger.info("Data to be synced: {}", dataToBesync); - if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { - logger.error("Invalid SyncUploadDataDigester object or tableName is null."); - return "Error: Invalid sync request."; - } + logger.info("Data to be synced: {}", dataToBesync); + if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { + logger.error("Invalid SyncUploadDataDigester object or tableName is null."); + return "Error: Invalid sync request."; + } - String syncTableName = syncUploadDataDigester.getTableName(); - logger.info("Syncing data for table: {}", syncTableName); - // Handle specific tables first, if their logic is distinct - if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { - String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester, syncResults); - if ("data sync passed".equals(result)) { - return "Sync successful for m_beneficiaryregidmapping."; - } else { - logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); - return "Sync failed for m_beneficiaryregidmapping."; - } + String syncTableName = syncUploadDataDigester.getTableName(); + logger.info("Syncing data for table: {}", syncTableName); + // Handle specific tables first, if their logic is distinct + if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { + String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester, syncResults); + if ("data sync passed".equals(result)) { + return "Sync successful for m_beneficiaryregidmapping."; + } else { + logger.error("Sync failed for m_beneficiaryregidmapping: {}", result); + return "Sync failed for m_beneficiaryregidmapping."; } - else { - boolean syncSuccess = true; - String errorMessage = ""; - if (syncTableName != null && !syncTableName.isEmpty()) { - boolean foundInGroup = false; - - for (Map map : dataToBesync) { - if (map.get("tableName") != null - && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { - syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, - syncUploadDataDigester, syncResults); - foundInGroup = true; - break; - } - } - if (!foundInGroup) { - logger.warn("Table '{}' not found in any predefined groups. 
Proceeding with generic sync logic.", - syncTableName); - syncSuccess = performGenericTableSync(syncUploadDataDigester, syncResults); + } else { + boolean syncSuccess = true; + String errorMessage = ""; + if (syncTableName != null && !syncTableName.isEmpty()) { + boolean foundInGroup = false; + + for (Map map : dataToBesync) { + if (map.get("tableName") != null + && map.get("tableName").toString().equalsIgnoreCase(syncTableName)) { + syncSuccess = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), syncTableName, + syncUploadDataDigester, syncResults); + foundInGroup = true; + break; } - } else { + } + if (!foundInGroup) { + logger.warn("Table '{}' not found in any predefined groups. Proceeding with generic sync logic.", + syncTableName); + syncSuccess = performGenericTableSync(syncUploadDataDigester, syncResults); + } + } else { - for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { - Integer groupId = entry.getKey(); - List tablesInGroup = entry.getValue(); - for (String table : tablesInGroup) { - try { - - boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), - table, syncUploadDataDigester, syncResults); - if (!currentTableSyncResult) { - syncSuccess = false; - errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; - logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, - errorMessage); - - } else { - logger.info("Successfully synced table: {} in Group {}", table, groupId); - } - } catch (Exception e) { + for (Map.Entry> entry : TABLE_GROUPS.entrySet()) { + Integer groupId = entry.getKey(); + List tablesInGroup = entry.getValue(); + for (String table : tablesInGroup) { + try { + + boolean currentTableSyncResult = syncTablesInGroup(syncUploadDataDigester.getSchemaName(), + table, syncUploadDataDigester, syncResults); + if (!currentTableSyncResult) { syncSuccess = false; - errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " - + e.getMessage() + ". "; - logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, - e.getMessage(), e); + errorMessage += "Failed to sync table: " + table + " in Group " + groupId + ". "; + logger.error("Sync failed for table '{}' in Group {}. Error: {}", table, groupId, + errorMessage); + + } else { + logger.info("Successfully synced table: {} in Group {}", table, groupId); } + } catch (Exception e) { + syncSuccess = false; + errorMessage += "Exception during sync for table: " + table + " in Group " + groupId + ": " + + e.getMessage() + ". 
"; + logger.error("Exception during sync for table '{}' in Group {}: {}", table, groupId, + e.getMessage(), e); } } } - - Map responseMap = new HashMap<>(); - responseMap.put("statusCode", 200); - responseMap.put("message", "Data sync completed"); - responseMap.put("records", syncResults); - - return new ObjectMapper().writeValueAsString(responseMap); } - } - private boolean syncTablesInGroup(String schemaName, String currentTableName, - SyncUploadDataDigester originalDigester, List syncResults) { - SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); - tableSpecificDigester.setSchemaName(schemaName); - tableSpecificDigester.setTableName(currentTableName); - tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); - tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); - tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); - tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); - tableSpecificDigester.setSyncData(originalDigester.getSyncData()); - return performGenericTableSync(tableSpecificDigester, syncResults); + // if (syncSuccess) { + // return "Overall data sync passed."; + // } else { + // return "Overall data sync failed. Details: " + errorMessage; + // } + Map responseMap = new HashMap<>(); + responseMap.put("statusCode", 200); + responseMap.put("message", "Data sync completed"); + responseMap.put("records", syncResults); + logger.info("Response = " + responseMap); + logger.info("Sync Results = " + syncResults); + return new ObjectMapper().writeValueAsString(responseMap); + } + } - private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( - SyncUploadDataDigester syncUploadDataDigester, List syncResults) { + private boolean syncTablesInGroup(String schemaName, String currentTableName, + SyncUploadDataDigester originalDigester, List syncResults) { + SyncUploadDataDigester tableSpecificDigester = new SyncUploadDataDigester(); + tableSpecificDigester.setSchemaName(schemaName); + tableSpecificDigester.setTableName(currentTableName); + tableSpecificDigester.setSyncedBy(originalDigester.getSyncedBy()); + tableSpecificDigester.setFacilityID(originalDigester.getFacilityID()); + tableSpecificDigester.setVanAutoIncColumnName(originalDigester.getVanAutoIncColumnName()); + tableSpecificDigester.setServerColumns(originalDigester.getServerColumns()); + tableSpecificDigester.setSyncData(originalDigester.getSyncData()); + return performGenericTableSync(tableSpecificDigester, syncResults); + } - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncData = new ArrayList<>(); + private String update_M_BeneficiaryRegIdMapping_for_provisioned_benID( + SyncUploadDataDigester syncUploadDataDigester, List syncResults) { - String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncData = new ArrayList<>(); - for (Map map : dataToBesync) { - if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { - Object[] objArr = new Object[4]; - objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy - objArr[1] = String.valueOf(map.get("BenRegId")); - objArr[2] = String.valueOf(map.get("BeneficiaryID")); - objArr[3] = String.valueOf(map.get("VanID")); - syncData.add(objArr); - } else { - logger.warn( - "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, 
or VanID: {}", - map); - } + String query = getqueryFor_M_BeneficiaryRegIdMapping(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); + + for (Map map : dataToBesync) { + if (map.get("BenRegId") != null && map.get("BeneficiaryID") != null && map.get("VanID") != null) { + Object[] objArr = new Object[4]; + objArr[0] = syncUploadDataDigester.getSyncedBy(); // SyncedBy + objArr[1] = String.valueOf(map.get("BenRegId")); + objArr[2] = String.valueOf(map.get("BeneficiaryID")); + objArr[3] = String.valueOf(map.get("VanID")); + syncData.add(objArr); + } else { + logger.warn( + "Skipping record in m_beneficiaryregidmapping due to missing BenRegId, BeneficiaryID, or VanID: {}", + map); } + } - if (!syncData.isEmpty()) { - try { - int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, - syncData); + if (!syncData.isEmpty()) { + try { + int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName(), syncUploadDataDigester.getServerColumns(), query, + syncData); - if (i.length == syncData.size()) { - logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); - return "data sync passed"; - } else { - logger.error( - "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", - syncData.size(), i.length, getFailedRecords(i, syncData)); - return "Partial data sync for m_beneficiaryregidmapping."; - } - } catch (Exception e) { - logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); - return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); + if (i.length == syncData.size()) { + logger.info("Successfully updated {} records for m_beneficiaryregidmapping.", i.length); + return "data sync passed"; + } else { + logger.error( + "Partial update for m_beneficiaryregidmapping. Expected {} updates, got {}. Failed records: {}", + syncData.size(), i.length, getFailedRecords(i, syncData)); + return "Partial data sync for m_beneficiaryregidmapping."; } - } else { - logger.info("No data to sync for m_beneficiaryregidmapping."); - return "data sync passed"; + } catch (Exception e) { + logger.error("Exception during update for m_beneficiaryregidmapping: {}", e.getMessage(), e); + return "Error during sync for m_beneficiaryregidmapping: " + e.getMessage(); } + } else { + logger.info("No data to sync for m_beneficiaryregidmapping."); + return "data sync passed"; } + } - private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); - queryBuilder.append(" WHERE "); - queryBuilder.append(" BenRegId = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" BeneficiaryID = ? "); - queryBuilder.append(" AND "); - queryBuilder.append(" VanID = ? 
"); - - return queryBuilder.toString(); - } + private String getqueryFor_M_BeneficiaryRegIdMapping(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Provisioned = true, SyncedDate = now(), syncedBy = ?"); + queryBuilder.append(" WHERE "); + queryBuilder.append(" BenRegId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" BeneficiaryID = ? "); + queryBuilder.append(" AND "); + queryBuilder.append(" VanID = ? "); - public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { - List syncData = new ArrayList<>(); + return queryBuilder.toString(); + } - String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), - syncUploadDataDigester.getTableName()); + public String update_I_BeneficiaryDetails_for_processed_in_batches(SyncUploadDataDigester syncUploadDataDigester) { + List syncData = new ArrayList<>(); - int limit = 1000; - int offset = 0; - int totalProcessed = 0; + String query = getQueryFor_I_BeneficiaryDetails(syncUploadDataDigester.getSchemaName(), + syncUploadDataDigester.getTableName()); - String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly + int limit = 1000; + int offset = 0; + int totalProcessed = 0; - while (true) { - List> batch; - try { + String problematicWhereClause = " WHERE Processed <> 'P' AND VanID IS NOT NULL "; // Define it explicitly - batch = dataSyncRepositoryCentral.getBatchForBenDetails( - syncUploadDataDigester, - problematicWhereClause, - limit, - offset); - } catch (Exception e) { - logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); - return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); - } + while (true) { + List> batch; + try { - if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a - // "success" - logger.info("Finished processing i_beneficiarydetails. Total records processed: {}", totalProcessed); - return "data sync passed"; - } else { - logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); - return "No data processed or sync failed for i_beneficiarydetails."; - } + batch = dataSyncRepositoryCentral.getBatchForBenDetails( + syncUploadDataDigester, + problematicWhereClause, + limit, + offset); + } catch (Exception e) { + logger.error("Error fetching batch for i_beneficiarydetails: {}", e.getMessage(), e); + return "Error fetching data for i_beneficiarydetails: " + e.getMessage(); } - } - private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { - StringBuilder queryBuilder = new StringBuilder(" UPDATE "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); - queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); - queryBuilder.append(" WHERE "); - queryBuilder.append("BeneficiaryDetailsId = ? "); - queryBuilder.append(" AND "); - queryBuilder.append("VanID = ? "); - return queryBuilder.toString(); + if (totalProcessed > 0 || syncData.isEmpty()) { // syncData.isEmpty() means no records to process, still a + // "success" + logger.info("Finished processing i_beneficiarydetails. 
Total records processed: {}", totalProcessed); + return "data sync passed"; + } else { + logger.error("No records were processed for i_beneficiarydetails or an unknown error occurred."); + return "No data processed or sync failed for i_beneficiarydetails."; + } } + } - /** - * Handles the generic synchronization logic for tables not covered by specific - * handlers. - */ - -// private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { -// List> dataToBesync = syncUploadDataDigester.getSyncData(); -// List syncDataListInsert = new ArrayList<>(); -// List syncDataListUpdate = new ArrayList<>(); -// // List syncResults = new ArrayList<>(); - -// boolean overallSuccess = true; - -// if (dataToBesync == null || dataToBesync.isEmpty()) { -// logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); -// return true; // Nothing to sync, consider it a success -// } - -// String syncTableName = syncUploadDataDigester.getTableName(); -// String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); -// String schemaName = syncUploadDataDigester.getSchemaName(); -// Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); -// String serverColumns = syncUploadDataDigester.getServerColumns(); - -// int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); - -// List serverColumnsList = Arrays.asList(serverColumns.split(",")); - -// for (Map map : dataToBesync) { -// // Create a new map with clean column names as keys -// Map cleanRecord = new HashMap<>(); -// for (String key : map.keySet()) { -// String cleanKey = key; -// // Handle keys with SQL functions like date_format -// if (key.startsWith("date_format(") && key.endsWith(")")) { -// int start = key.indexOf("(") + 1; -// int end = key.indexOf(","); -// if (end > start) { -// cleanKey = key.substring(start, end).trim(); -// } else { -// // Fallback if format is unexpected -// cleanKey = key.substring(start, key.indexOf(")")).trim(); -// } -// } -// cleanRecord.put(cleanKey.trim(), map.get(key)); -// } - -// String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); -// String vanID = String.valueOf(cleanRecord.get("VanID")); -// int syncFacilityID = 0; - -// // Update SyncedBy and SyncedDate in the xmap itself before processing -// cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); -// cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - -// if (facilityIDFromDigester != null) { -// // Determine the 'Processed' status based on facility ID for specific tables -// switch (syncTableName.toLowerCase()) { -// case "t_indent": -// case "t_indentorder": { -// if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { -// Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); -// if (fromFacilityID.intValue() == facilityIDFromDigester) { -// cleanRecord.put("Processed", "P"); -// } -// } -// break; -// } -// case "t_indentissue": { -// if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { -// Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); -// if (toFacilityID.intValue() == facilityIDFromDigester) { -// cleanRecord.put("Processed", "P"); -// } -// } -// break; -// } -// case "t_stocktransfer": { -// if (cleanRecord.containsKey("TransferToFacilityID") -// && cleanRecord.get("TransferToFacilityID") instanceof Number) { -// Number transferToFacilityID = 
(Number) cleanRecord.get("TransferToFacilityID"); -// if (transferToFacilityID.intValue() == facilityIDFromDigester) { -// cleanRecord.put("Processed", "P"); -// } -// } -// break; -// } -// case "t_itemstockentry": { -// if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { -// Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); -// if (mapFacilityID.intValue() == facilityIDFromDigester) { -// cleanRecord.put("Processed", "P"); -// } -// } -// break; -// } -// default: -// // No specific facility ID logic for other tables -// break; -// } -// } - -// // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot -// if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { -// syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); -// } - -// int recordCheck; -// try { -// recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( -// schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); -// logger.info("Test Record check="+recordCheck); -// logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: vanAutoIncColumnName="+vanAutoIncColumnName+":: syncFacilityID="+syncFacilityID); -// } catch (Exception e) { -// logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", -// syncTableName, vanSerialNo, vanID, e.getMessage(), e); -// return false; // Critical error, stop sync for this table -// } - -// // Prepare Object array for insert/update -// List currentRecordValues = new ArrayList<>(); -// for (String column : serverColumnsList) { -// Object value = cleanRecord.get(column.trim()); -// if (value instanceof Boolean) { -// currentRecordValues.add(value); -// } else if (value != null) { -// currentRecordValues.add(String.valueOf(value)); -// } else { -// currentRecordValues.add(null); -// } -// } - -// Object[] objArr = currentRecordValues.toArray(); -// logger.info("Test Obj Arr {}",objArr); -// if (recordCheck == 0) { -// syncDataListInsert.add(objArr); -// syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); -// } else { -// // For update, append the WHERE clause parameters at the end of the array -// List updateParams = new ArrayList<>(Arrays.asList(objArr)); -// updateParams.add(String.valueOf(vanSerialNo)); - -// if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", -// "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", -// "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") -// .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { -// updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); -// } else { -// updateParams.add(String.valueOf(vanID)); -// } -// syncDataListUpdate.add(updateParams.toArray()); -// syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, syncUploadDataDigester.getSyncedBy(), true, null)); - -// } -// } - -// boolean insertSuccess = true; -// boolean updateSuccess = true; - -// if (!syncDataListInsert.isEmpty()) { -// String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); -// logger.info("Query Insert="+queryInsert); -// try { -// int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, 
queryInsert, syncDataListInsert); -// for (int k = 0; k < i.length; k++) { -// if (i[k] < 1) { -// syncResults.set(k, new SyncResult(schemaName, syncTableName, -// String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo position -// syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); -// } -// } -// } -// catch (Exception e) { -// String shortReason; -// if (e.getMessage() != null) { -// if (e.getMessage().toLowerCase().contains("duplicate")) { -// shortReason = "Duplicate entry"; -// } else if (e.getMessage().toLowerCase().contains("constraint")) { -// shortReason = "Constraint violation"; -// } else if (e.getMessage().toLowerCase().contains("timeout")) { -// shortReason = "DB timeout"; -// } else { -// shortReason = "Insert/Update failed"; -// } -// } else { -// shortReason = "Unknown DB error"; -// } -// // Always add, never set, to avoid index errors -// for (int k = 0; k < syncDataListInsert.size(); k++) { -// syncResults.add(new SyncResult(schemaName, syncTableName, -// String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), -// syncUploadDataDigester.getSyncedBy(), false, shortReason)); -// } -// } - -// // try { -// // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, -// // serverColumns, queryInsert, syncDataListInsert); -// // logger.info("Insert result array length: {}", i.length); -// // logger.info("Expected insert size: {}", syncDataListInsert.size()); -// // if (i.length != syncDataListInsert.size()) { -// // insertSuccess = false; -// // logger.error("Partial insert for table {}. Expected {} inserts, got {}. Failed records: {}", -// // syncTableName, syncDataListInsert.size(), i.length, -// // getFailedRecords(i, syncDataListInsert)); -// // } else { -// // logger.info("Successfully inserted {} records into table {}.", i.length, syncTableName); -// // } -// // } catch (Exception e) { -// // insertSuccess = false; -// // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListInsert)); -// // logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); -// // } -// } - -// if (!syncDataListUpdate.isEmpty()) { -// String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); -// logger.info("Query Update="+queryUpdate); -// try { -// int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, serverColumns, queryUpdate, syncDataListUpdate); -// logger.info("Test Update result array length: {}", i.length); -// logger.info("Test Expected update size: {}", syncDataListUpdate.size()); -// for (int k = 0; k < i.length; k++) { -// if (i[k] < 1) { -// syncResults.set(k, new SyncResult(schemaName, syncTableName, -// String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo position -// syncUploadDataDigester.getSyncedBy(), false, "Update failed")); -// } -// } -// } -// catch (Exception e) { -// String shortReason; -// if (e.getMessage() != null) { -// if (e.getMessage().toLowerCase().contains("duplicate")) { -// shortReason = "Duplicate entry"; -// } else if (e.getMessage().toLowerCase().contains("constraint")) { -// shortReason = "Constraint violation"; -// } else if (e.getMessage().toLowerCase().contains("timeout")) { -// shortReason = "DB timeout"; -// } else { -// shortReason = "Insert/Update failed"; -// } -// } else { -// shortReason = "Unknown DB error"; -// } -// for (int k = 0; k < syncDataListUpdate.size(); k++) { -// syncResults.add(new SyncResult(schemaName, syncTableName, -// 
String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), -// syncUploadDataDigester.getSyncedBy(), false, shortReason)); -// } -// } - -// // try { -// // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, -// // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); -// // logger.info("Update result array length: {}", j.length); -// // logger.info("Expected update size: {}", syncDataListUpdate.size()); -// // if (j.length != syncDataListUpdate.size()) { -// // updateSuccess = false; -// // logger.error("Partial update for table {}. Expected {} updates, got {}. Failed records: {}", -// // syncTableName, syncDataListUpdate.size(), j.length, -// // getFailedRecords(j, syncDataListUpdate)); -// // } else { -// // logger.info("Successfully updated {} records in table {}.", j.length, syncTableName); -// // } -// // } catch (Exception e) { -// // updateSuccess = false; -// // logger.error("Get failed records="+getFailedRecords(new int[] {}, syncDataListUpdate)); -// // logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); -// // } -// } -// logger.info("Sync results for table {}: {}", syncTableName, syncResults); -// return insertSuccess && updateSuccess; -// } - -private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { - List> dataToBesync = syncUploadDataDigester.getSyncData(); - List syncDataListInsert = new ArrayList<>(); - List syncDataListUpdate = new ArrayList<>(); - - // Track indices for insert and update operations - Map insertIndexMap = new HashMap<>(); // syncResults index -> insert list index - Map updateIndexMap = new HashMap<>(); // syncResults index -> update list index - - boolean overallSuccess = true; - - if (dataToBesync == null || dataToBesync.isEmpty()) { - logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); - return true; + private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableName) { + StringBuilder queryBuilder = new StringBuilder(" UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append("Processed = 'P', SyncedDate = now(), SyncedBy = ? "); + queryBuilder.append(" WHERE "); + queryBuilder.append("BeneficiaryDetailsId = ? "); + queryBuilder.append(" AND "); + queryBuilder.append("VanID = ? 
"); + return queryBuilder.toString(); } - String syncTableName = syncUploadDataDigester.getTableName(); - String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); - String schemaName = syncUploadDataDigester.getSchemaName(); - Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); - String serverColumns = syncUploadDataDigester.getServerColumns(); - - int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); - List serverColumnsList = Arrays.asList(serverColumns.split(",")); - - for (Map map : dataToBesync) { - // Create a new map with clean column names as keys - Map cleanRecord = new HashMap<>(); - for (String key : map.keySet()) { - String cleanKey = key; - // Handle keys with SQL functions like date_format - if (key.startsWith("date_format(") && key.endsWith(")")) { - int start = key.indexOf("(") + 1; - int end = key.indexOf(","); - if (end > start) { - cleanKey = key.substring(start, end).trim(); - } else { - cleanKey = key.substring(start, key.indexOf(")")).trim(); + /** + * Handles the generic synchronization logic for tables not covered by specific + * handlers. + */ + + // private boolean performGenericTableSync(SyncUploadDataDigester + // syncUploadDataDigester, List syncResults) { + // List> dataToBesync = + // syncUploadDataDigester.getSyncData(); + // List syncDataListInsert = new ArrayList<>(); + // List syncDataListUpdate = new ArrayList<>(); + // // List syncResults = new ArrayList<>(); + + // boolean overallSuccess = true; + + // if (dataToBesync == null || dataToBesync.isEmpty()) { + // logger.info("No data to sync for table: {}", + // syncUploadDataDigester.getTableName()); + // return true; // Nothing to sync, consider it a success + // } + + // String syncTableName = syncUploadDataDigester.getTableName(); + // String vanAutoIncColumnName = + // syncUploadDataDigester.getVanAutoIncColumnName(); + // String schemaName = syncUploadDataDigester.getSchemaName(); + // Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + // String serverColumns = syncUploadDataDigester.getServerColumns(); + + // int vanSerialIndex = + // Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + + // List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + // for (Map map : dataToBesync) { + // // Create a new map with clean column names as keys + // Map cleanRecord = new HashMap<>(); + // for (String key : map.keySet()) { + // String cleanKey = key; + // // Handle keys with SQL functions like date_format + // if (key.startsWith("date_format(") && key.endsWith(")")) { + // int start = key.indexOf("(") + 1; + // int end = key.indexOf(","); + // if (end > start) { + // cleanKey = key.substring(start, end).trim(); + // } else { + // // Fallback if format is unexpected + // cleanKey = key.substring(start, key.indexOf(")")).trim(); + // } + // } + // cleanRecord.put(cleanKey.trim(), map.get(key)); + // } + + // String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); + // String vanID = String.valueOf(cleanRecord.get("VanID")); + // int syncFacilityID = 0; + + // // Update SyncedBy and SyncedDate in the xmap itself before processing + // cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + // cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + // if (facilityIDFromDigester != null) { + // // Determine the 'Processed' status based on facility ID for specific tables + // switch (syncTableName.toLowerCase()) { + // case 
"t_indent": + // case "t_indentorder": { + // if (cleanRecord.containsKey("FromFacilityID") && + // cleanRecord.get("FromFacilityID") instanceof Number) { + // Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + // if (fromFacilityID.intValue() == facilityIDFromDigester) { + // cleanRecord.put("Processed", "P"); + // } + // } + // break; + // } + // case "t_indentissue": { + // if (cleanRecord.containsKey("ToFacilityID") && + // cleanRecord.get("ToFacilityID") instanceof Number) { + // Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + // if (toFacilityID.intValue() == facilityIDFromDigester) { + // cleanRecord.put("Processed", "P"); + // } + // } + // break; + // } + // case "t_stocktransfer": { + // if (cleanRecord.containsKey("TransferToFacilityID") + // && cleanRecord.get("TransferToFacilityID") instanceof Number) { + // Number transferToFacilityID = (Number) + // cleanRecord.get("TransferToFacilityID"); + // if (transferToFacilityID.intValue() == facilityIDFromDigester) { + // cleanRecord.put("Processed", "P"); + // } + // } + // break; + // } + // case "t_itemstockentry": { + // if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") + // instanceof Number) { + // Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + // if (mapFacilityID.intValue() == facilityIDFromDigester) { + // cleanRecord.put("Processed", "P"); + // } + // } + // break; + // } + // default: + // // No specific facility ID logic for other tables + // break; + // } + // } + + // // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + // if (cleanRecord.containsKey("SyncFacilityID") && + // cleanRecord.get("SyncFacilityID") instanceof Number) { + // syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + // } + + // int recordCheck; + // try { + // recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + // schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, + // syncFacilityID); + // logger.info("Test Record check="+recordCheck); + // logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: + // vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: + // vanAutoIncColumnName="+vanAutoIncColumnName+":: + // syncFacilityID="+syncFacilityID); + // } catch (Exception e) { + // logger.error("Error checking record existence for table {}: VanSerialNo={}, + // VanID={}. 
Error: {}", + // syncTableName, vanSerialNo, vanID, e.getMessage(), e); + // return false; // Critical error, stop sync for this table + // } + + // // Prepare Object array for insert/update + // List currentRecordValues = new ArrayList<>(); + // for (String column : serverColumnsList) { + // Object value = cleanRecord.get(column.trim()); + // if (value instanceof Boolean) { + // currentRecordValues.add(value); + // } else if (value != null) { + // currentRecordValues.add(String.valueOf(value)); + // } else { + // currentRecordValues.add(null); + // } + // } + + // Object[] objArr = currentRecordValues.toArray(); + // logger.info("Test Obj Arr {}",objArr); + // if (recordCheck == 0) { + // syncDataListInsert.add(objArr); + // syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + // syncUploadDataDigester.getSyncedBy(), true, null)); + // } else { + // // For update, append the WHERE clause parameters at the end of the array + // List updateParams = new ArrayList<>(Arrays.asList(objArr)); + // updateParams.add(String.valueOf(vanSerialNo)); + + // if (Arrays.asList("t_patientissue", "t_physicalstockentry", + // "t_stockadjustment", "t_saitemmapping", + // "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + // "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + // .contains(syncTableName.toLowerCase()) && + // cleanRecord.containsKey("SyncFacilityID")) { + // updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + // } else { + // updateParams.add(String.valueOf(vanID)); + // } + // syncDataListUpdate.add(updateParams.toArray()); + // syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + // syncUploadDataDigester.getSyncedBy(), true, null)); + + // } + // } + + // boolean insertSuccess = true; + // boolean updateSuccess = true; + + // if (!syncDataListInsert.isEmpty()) { + // String queryInsert = getQueryToInsertDataToServerDB(schemaName, + // syncTableName, serverColumns); + // logger.info("Query Insert="+queryInsert); + // try { + // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + // syncTableName, serverColumns, queryInsert, syncDataListInsert); + // for (int k = 0; k < i.length; k++) { + // if (i[k] < 1) { + // syncResults.set(k, new SyncResult(schemaName, syncTableName, + // String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo + // position + // syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); + // } + // } + // } + // catch (Exception e) { + // String shortReason; + // if (e.getMessage() != null) { + // if (e.getMessage().toLowerCase().contains("duplicate")) { + // shortReason = "Duplicate entry"; + // } else if (e.getMessage().toLowerCase().contains("constraint")) { + // shortReason = "Constraint violation"; + // } else if (e.getMessage().toLowerCase().contains("timeout")) { + // shortReason = "DB timeout"; + // } else { + // shortReason = "Insert/Update failed"; + // } + // } else { + // shortReason = "Unknown DB error"; + // } + // // Always add, never set, to avoid index errors + // for (int k = 0; k < syncDataListInsert.size(); k++) { + // syncResults.add(new SyncResult(schemaName, syncTableName, + // String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), + // syncUploadDataDigester.getSyncedBy(), false, shortReason)); + // } + // } + + // // try { + // // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + // syncTableName, + // // serverColumns, queryInsert, syncDataListInsert); + // // 
logger.info("Insert result array length: {}", i.length); + // // logger.info("Expected insert size: {}", syncDataListInsert.size()); + // // if (i.length != syncDataListInsert.size()) { + // // insertSuccess = false; + // // logger.error("Partial insert for table {}. Expected {} inserts, got {}. + // Failed records: {}", + // // syncTableName, syncDataListInsert.size(), i.length, + // // getFailedRecords(i, syncDataListInsert)); + // // } else { + // // logger.info("Successfully inserted {} records into table {}.", i.length, + // syncTableName); + // // } + // // } catch (Exception e) { + // // insertSuccess = false; + // // logger.error("Get failed records="+getFailedRecords(new int[] {}, + // syncDataListInsert)); + // // logger.error("Exception during insert for table {}: {}", syncTableName, + // e.getMessage(), e); + // // } + // } + + // if (!syncDataListUpdate.isEmpty()) { + // String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, + // serverColumns, syncTableName); + // logger.info("Query Update="+queryUpdate); + // try { + // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + // syncTableName, serverColumns, queryUpdate, syncDataListUpdate); + // logger.info("Test Update result array length: {}", i.length); + // logger.info("Test Expected update size: {}", syncDataListUpdate.size()); + // for (int k = 0; k < i.length; k++) { + // if (i[k] < 1) { + // syncResults.set(k, new SyncResult(schemaName, syncTableName, + // String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo + // position + // syncUploadDataDigester.getSyncedBy(), false, "Update failed")); + // } + // } + // } + // catch (Exception e) { + // String shortReason; + // if (e.getMessage() != null) { + // if (e.getMessage().toLowerCase().contains("duplicate")) { + // shortReason = "Duplicate entry"; + // } else if (e.getMessage().toLowerCase().contains("constraint")) { + // shortReason = "Constraint violation"; + // } else if (e.getMessage().toLowerCase().contains("timeout")) { + // shortReason = "DB timeout"; + // } else { + // shortReason = "Insert/Update failed"; + // } + // } else { + // shortReason = "Unknown DB error"; + // } + // for (int k = 0; k < syncDataListUpdate.size(); k++) { + // syncResults.add(new SyncResult(schemaName, syncTableName, + // String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), + // syncUploadDataDigester.getSyncedBy(), false, shortReason)); + // } + // } + + // // try { + // // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, + // syncTableName, + // // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); + // // logger.info("Update result array length: {}", j.length); + // // logger.info("Expected update size: {}", syncDataListUpdate.size()); + // // if (j.length != syncDataListUpdate.size()) { + // // updateSuccess = false; + // // logger.error("Partial update for table {}. Expected {} updates, got {}. 
+ // Failed records: {}", + // // syncTableName, syncDataListUpdate.size(), j.length, + // // getFailedRecords(j, syncDataListUpdate)); + // // } else { + // // logger.info("Successfully updated {} records in table {}.", j.length, + // syncTableName); + // // } + // // } catch (Exception e) { + // // updateSuccess = false; + // // logger.error("Get failed records="+getFailedRecords(new int[] {}, + // syncDataListUpdate)); + // // logger.error("Exception during update for table {}: {}", syncTableName, + // e.getMessage(), e); + // // } + // } + // logger.info("Sync results for table {}: {}", syncTableName, syncResults); + // return insertSuccess && updateSuccess; + // } + + private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, + List syncResults) { + List> dataToBesync = syncUploadDataDigester.getSyncData(); + List syncDataListInsert = new ArrayList<>(); + List syncDataListUpdate = new ArrayList<>(); + + // Track indices for insert and update operations + Map insertIndexMap = new HashMap<>(); // syncResults index -> insert list index + Map updateIndexMap = new HashMap<>(); // syncResults index -> update list index + + boolean overallSuccess = true; + + if (dataToBesync == null || dataToBesync.isEmpty()) { + logger.info("No data to sync for table: {}", syncUploadDataDigester.getTableName()); + return true; + } + + String syncTableName = syncUploadDataDigester.getTableName(); + String vanAutoIncColumnName = syncUploadDataDigester.getVanAutoIncColumnName(); + String schemaName = syncUploadDataDigester.getSchemaName(); + Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); + String serverColumns = syncUploadDataDigester.getServerColumns(); + + int vanSerialIndex = Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); + List serverColumnsList = Arrays.asList(serverColumns.split(",")); + + for (Map map : dataToBesync) { + // Create a new map with clean column names as keys + Map cleanRecord = new HashMap<>(); + for (String key : map.keySet()) { + String cleanKey = key; + // Handle keys with SQL functions like date_format + if (key.startsWith("date_format(") && key.endsWith(")")) { + int start = key.indexOf("(") + 1; + int end = key.indexOf(","); + if (end > start) { + cleanKey = key.substring(start, end).trim(); + } else { + cleanKey = key.substring(start, key.indexOf(")")).trim(); + } } + cleanRecord.put(cleanKey.trim(), map.get(key)); } - cleanRecord.put(cleanKey.trim(), map.get(key)); - } - String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); - String vanID = String.valueOf(cleanRecord.get("VanID")); - int syncFacilityID = 0; - - // Update SyncedBy and SyncedDate in the cleanRecord - cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - - if (facilityIDFromDigester != null) { - // Determine the 'Processed' status based on facility ID for specific tables - switch (syncTableName.toLowerCase()) { - case "t_indent": - case "t_indentorder": { - if (cleanRecord.containsKey("FromFacilityID") && cleanRecord.get("FromFacilityID") instanceof Number) { - Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); - if (fromFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); + String vanID = String.valueOf(cleanRecord.get("VanID")); + int syncFacilityID = 0; + + // Update SyncedBy and SyncedDate in the 
cleanRecord + cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); + cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); + + if (facilityIDFromDigester != null) { + // Determine the 'Processed' status based on facility ID for specific tables + switch (syncTableName.toLowerCase()) { + case "t_indent": + case "t_indentorder": { + if (cleanRecord.containsKey("FromFacilityID") + && cleanRecord.get("FromFacilityID") instanceof Number) { + Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); + if (fromFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_indentissue": { - if (cleanRecord.containsKey("ToFacilityID") && cleanRecord.get("ToFacilityID") instanceof Number) { - Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); - if (toFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_indentissue": { + if (cleanRecord.containsKey("ToFacilityID") + && cleanRecord.get("ToFacilityID") instanceof Number) { + Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); + if (toFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_stocktransfer": { - if (cleanRecord.containsKey("TransferToFacilityID") - && cleanRecord.get("TransferToFacilityID") instanceof Number) { - Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); - if (transferToFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_stocktransfer": { + if (cleanRecord.containsKey("TransferToFacilityID") + && cleanRecord.get("TransferToFacilityID") instanceof Number) { + Number transferToFacilityID = (Number) cleanRecord.get("TransferToFacilityID"); + if (transferToFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; - } - case "t_itemstockentry": { - if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { - Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); - if (mapFacilityID.intValue() == facilityIDFromDigester) { - cleanRecord.put("Processed", "P"); + case "t_itemstockentry": { + if (cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") instanceof Number) { + Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); + if (mapFacilityID.intValue() == facilityIDFromDigester) { + cleanRecord.put("Processed", "P"); + } } + break; } - break; + default: + break; } - default: - break; } - } - // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { - syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); - } + // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot + if (cleanRecord.containsKey("SyncFacilityID") && cleanRecord.get("SyncFacilityID") instanceof Number) { + syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); + } - int recordCheck; - try { - recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); - logger.info("Record check result: {}", recordCheck); - } catch (Exception e) { - logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. 
Error: {}", - syncTableName, vanSerialNo, vanID, e.getMessage(), e); - - // Add failed result for this record - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, "Record check failed")); - continue; // Skip to next record - } + int recordCheck; + try { + recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( + schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, syncFacilityID); + logger.info("Record check result: {}", recordCheck); + } catch (Exception e) { + logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", + syncTableName, vanSerialNo, vanID, e.getMessage(), e); + + // Store the complete error message from record check failure + String fullErrorMessage = "Record existence check failed: " + + (e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName() + " occurred"); + + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, fullErrorMessage)); + continue; // Skip to next record + } - // Prepare Object array for insert/update - List currentRecordValues = new ArrayList<>(); - for (String column : serverColumnsList) { - Object value = cleanRecord.get(column.trim()); - if (value instanceof Boolean) { - currentRecordValues.add(value); - } else if (value != null) { - currentRecordValues.add(String.valueOf(value)); - } else { - currentRecordValues.add(null); + // Prepare Object array for insert/update + List currentRecordValues = new ArrayList<>(); + for (String column : serverColumnsList) { + Object value = cleanRecord.get(column.trim()); + if (value instanceof Boolean) { + currentRecordValues.add(value); + } else if (value != null) { + currentRecordValues.add(String.valueOf(value)); + } else { + currentRecordValues.add(null); + } } - } - Object[] objArr = currentRecordValues.toArray(); - - // Add to syncResults first, then track the index - int currentSyncResultIndex = syncResults.size(); - syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), true, null)); // Initially set as success - - if (recordCheck == 0) { - // Record doesn't exist - INSERT - insertIndexMap.put(currentSyncResultIndex, syncDataListInsert.size()); - syncDataListInsert.add(objArr); - } else { - // Record exists - UPDATE - List updateParams = new ArrayList<>(Arrays.asList(objArr)); - updateParams.add(String.valueOf(vanSerialNo)); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { - updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + Object[] objArr = currentRecordValues.toArray(); + + // Add to syncResults first, then track the index + int currentSyncResultIndex = syncResults.size(); + syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), true, null)); // Initially set as success + + if (recordCheck == 0) { + // Record doesn't exist - INSERT + insertIndexMap.put(currentSyncResultIndex, syncDataListInsert.size()); + syncDataListInsert.add(objArr); } else { - updateParams.add(String.valueOf(vanID)); + // Record exists - UPDATE + List updateParams = new 
ArrayList<>(Arrays.asList(objArr)); + updateParams.add(String.valueOf(vanSerialNo)); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) && cleanRecord.containsKey("SyncFacilityID")) { + updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); + } else { + updateParams.add(String.valueOf(vanID)); + } + + updateIndexMap.put(currentSyncResultIndex, syncDataListUpdate.size()); + syncDataListUpdate.add(updateParams.toArray()); } - - updateIndexMap.put(currentSyncResultIndex, syncDataListUpdate.size()); - syncDataListUpdate.add(updateParams.toArray()); } - } - boolean insertSuccess = true; - boolean updateSuccess = true; - - // Process INSERT operations - if (!syncDataListInsert.isEmpty()) { - String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); - logger.info("Insert Query: {}", queryInsert); - - try { - int[] insertResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - serverColumns, queryInsert, syncDataListInsert); - - // Update syncResults based on insert results - for (Map.Entry entry : insertIndexMap.entrySet()) { - int syncResultIndex = entry.getKey(); - int insertListIndex = entry.getValue(); - - if (insertListIndex < insertResults.length && insertResults[insertListIndex] > 0) { - // Success - keep the existing success entry - logger.info("Successfully inserted record at index {}", insertListIndex); - } else { - // Failed - update the syncResults entry + boolean insertSuccess = true; + boolean updateSuccess = true; + + // Process INSERT operations + if (!syncDataListInsert.isEmpty()) { + String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); + logger.info("Insert Query: {}", queryInsert); + + try { + int[] insertResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryInsert, syncDataListInsert); + + // Update syncResults based on insert results + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); + + if (insertListIndex < insertResults.length && insertResults[insertListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully inserted record at index {}", insertListIndex); + } else { + // Failed - update the syncResults entry with detailed reason + String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + String detailedReason = "Insert operation failed. Database returned result code: " + + (insertListIndex < insertResults.length ? insertResults[insertListIndex] : "unknown") + + ". Result codes: 0=no rows affected (possibly due to constraint violation or duplicate key), " + + + "-2=operation failed, -3=operation succeeded but row count unknown, " + + "1=success with 1 row affected. 
VanSerialNo: " + vanSerialNo + + ", Table: " + syncTableName + ", Schema: " + schemaName; + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, detailedReason)); + insertSuccess = false; + } + } + + } catch (Exception e) { + insertSuccess = false; + logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); + + // Store the complete exception message and details + String completeErrorMessage = buildCompleteErrorMessage(e, "INSERT", syncTableName, schemaName); + + // Update all insert-related syncResults to failed with complete error message + for (Map.Entry entry : insertIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int insertListIndex = entry.getValue(); String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); + + String recordSpecificError = completeErrorMessage + " | VanSerialNo: " + vanSerialNo + + " | Record Index: " + insertListIndex + " | Total Records in Batch: " + + syncDataListInsert.size(); + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); - insertSuccess = false; + syncUploadDataDigester.getSyncedBy(), false, recordSpecificError)); } } - - } catch (Exception e) { - insertSuccess = false; - logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e); - - String shortReason = getShortErrorReason(e); - - // Update all insert-related syncResults to failed - for (Map.Entry entry : insertIndexMap.entrySet()) { - int syncResultIndex = entry.getKey(); - int insertListIndex = entry.getValue(); - String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); - - syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, shortReason)); - } } - } - // Process UPDATE operations - if (!syncDataListUpdate.isEmpty()) { - String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); - logger.info("Update Query: {}", queryUpdate); - - try { - int[] updateResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, - serverColumns, queryUpdate, syncDataListUpdate); - - // Update syncResults based on update results - for (Map.Entry entry : updateIndexMap.entrySet()) { - int syncResultIndex = entry.getKey(); - int updateListIndex = entry.getValue(); - - if (updateListIndex < updateResults.length && updateResults[updateListIndex] > 0) { - // Success - keep the existing success entry - logger.info("Successfully updated record at index {}", updateListIndex); - } else { - // Failed - update the syncResults entry + // Process UPDATE operations + if (!syncDataListUpdate.isEmpty()) { + String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName); + logger.info("Update Query: {}", queryUpdate); + + try { + int[] updateResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, + serverColumns, queryUpdate, syncDataListUpdate); + + // Update syncResults based on update results + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); + + if (updateListIndex < updateResults.length && updateResults[updateListIndex] > 0) { + // Success - keep the existing success entry + logger.info("Successfully updated record at 
index {}", updateListIndex); + } else { + // Failed - update the syncResults entry with detailed reason + String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + String detailedReason = "Update operation failed. Database returned result code: " + + (updateListIndex < updateResults.length ? updateResults[updateListIndex] : "unknown") + + ". Result codes: 0=no rows affected (possibly record not found or no changes), " + + "-2=operation failed, -3=operation succeeded but row count unknown, " + + "1=success with 1 row affected. VanSerialNo: " + vanSerialNo + + ", Table: " + syncTableName + ", Schema: " + schemaName + + ", WHERE clause used VanSerialNo and " + + (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", + "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(syncTableName.toLowerCase()) ? "SyncFacilityID" : "VanID"); + + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, + syncUploadDataDigester.getSyncedBy(), false, detailedReason)); + updateSuccess = false; + } + } + + } catch (Exception e) { + updateSuccess = false; + logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); + + // Store the complete exception message and details + String completeErrorMessage = buildCompleteErrorMessage(e, "UPDATE", syncTableName, schemaName); + + // Update all update-related syncResults to failed with complete error message + for (Map.Entry entry : updateIndexMap.entrySet()) { + int syncResultIndex = entry.getKey(); + int updateListIndex = entry.getValue(); String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); + + String recordSpecificError = completeErrorMessage + " | VanSerialNo: " + vanSerialNo + + " | Record Index: " + updateListIndex + " | Total Records in Batch: " + + syncDataListUpdate.size(); + syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, "Update failed")); - updateSuccess = false; + syncUploadDataDigester.getSyncedBy(), false, recordSpecificError)); } } - - } catch (Exception e) { - updateSuccess = false; - logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e); - - String shortReason = getShortErrorReason(e); - - // Update all update-related syncResults to failed - for (Map.Entry entry : updateIndexMap.entrySet()) { - int syncResultIndex = entry.getKey(); - int updateListIndex = entry.getValue(); - String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]); - - syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, shortReason)); - } } + + logger.info("Sync results for table {}: {}", syncTableName, syncResults); + return insertSuccess && updateSuccess; } - logger.info("Sync results for table {}: {}", syncTableName, syncResults); - return insertSuccess && updateSuccess; -} - -// Helper method to get short error reason -private String getShortErrorReason(Exception e) { - if (e.getMessage() != null) { - String message = e.getMessage().toLowerCase(); - if (message.contains("duplicate")) { - return "Duplicate entry"; - } else if (message.contains("constraint")) { - return "Constraint violation"; - } else if (message.contains("timeout")) { - return "DB 
timeout"; - } else { - return "Database error"; + // Helper method to build complete error message with all available details + private String buildCompleteErrorMessage(Exception e, String operation, String tableName, String schemaName) { + StringBuilder errorMessage = new StringBuilder(); + + // Basic operation info + errorMessage.append(operation).append(" operation failed for table: ").append(schemaName).append(".") + .append(tableName); + + // Exception type + errorMessage.append(" | Exception Type: ").append(e.getClass().getSimpleName()); + + // Main error message + if (e.getMessage() != null && !e.getMessage().trim().isEmpty()) { + errorMessage.append(" | Error Message: ").append(e.getMessage()); + } + + // SQL specific details if it's a SQLException + if (e instanceof SQLException) { + SQLException sqlEx = (SQLException) e; + errorMessage.append(" | SQL Error Code: ").append(sqlEx.getErrorCode()); + errorMessage.append(" | SQL State: ").append(sqlEx.getSQLState()); } - } else { - return "Unknown error"; + + // Add timestamp + errorMessage.append(" | Error Timestamp: ").append(LocalDateTime.now()); + + // Cause chain if available + Throwable cause = e.getCause(); + int causeLevel = 1; + while (cause != null && causeLevel <= 3) { // Limit to 3 levels to prevent excessive length + errorMessage.append(" | Cause Level ").append(causeLevel).append(": ") + .append(cause.getClass().getSimpleName()); + if (cause.getMessage() != null && !cause.getMessage().trim().isEmpty()) { + errorMessage.append(" - ").append(cause.getMessage()); + } + cause = cause.getCause(); + causeLevel++; + } + + return errorMessage.toString(); } -} - private String getQueryToInsertDataToServerDB(String schemaName, String - tableName, String serverColumns) { + + private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) { String[] columnsArr = null; if (serverColumns != null) - columnsArr = serverColumns.split(","); + columnsArr = serverColumns.split(","); StringBuilder preparedStatementSetter = new StringBuilder(); if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - preparedStatementSetter.append("?"); - if (i < columnsArr.length - 1) { - preparedStatementSetter.append(", "); - } - } + for (int i = 0; i < columnsArr.length; i++) { + preparedStatementSetter.append("?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); + } + } } StringBuilder queryBuilder = new StringBuilder("INSERT INTO "); @@ -874,65 +977,64 @@ private String getQueryToInsertDataToServerDB(String schemaName, String queryBuilder.append(")"); logger.info("Test Query Builder: {}", queryBuilder.toString()); return queryBuilder.toString(); - } + } - public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { - String[] columnsArr = null; - if (serverColumns != null) - columnsArr = serverColumns.split(","); + public String getQueryToUpdateDataToServerDB(String schemaName, String serverColumns, String tableName) { + String[] columnsArr = null; + if (serverColumns != null) + columnsArr = serverColumns.split(","); - StringBuilder preparedStatementSetter = new StringBuilder(); + StringBuilder preparedStatementSetter = new StringBuilder(); - if (columnsArr != null && columnsArr.length > 0) { - for (int i = 0; i < columnsArr.length; i++) { - String column = columnsArr[i].trim(); - preparedStatementSetter.append(column).append(" = ?"); - if (i < columnsArr.length - 1) { - 
preparedStatementSetter.append(", "); - } + if (columnsArr != null && columnsArr.length > 0) { + for (int i = 0; i < columnsArr.length; i++) { + String column = columnsArr[i].trim(); + preparedStatementSetter.append(column).append(" = ?"); + if (i < columnsArr.length - 1) { + preparedStatementSetter.append(", "); } } + } - StringBuilder queryBuilder = new StringBuilder("UPDATE "); - queryBuilder.append(schemaName).append(".").append(tableName); - queryBuilder.append(" SET "); - queryBuilder.append(preparedStatementSetter); - queryBuilder.append(" WHERE VanSerialNo = ? "); - - if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", - "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - .contains(tableName.toLowerCase())) { - queryBuilder.append(" AND SyncFacilityID = ? "); - } else { - queryBuilder.append(" AND VanID = ? "); - } - logger.info("Test Query Builder: {}", queryBuilder.toString()); - return queryBuilder.toString(); + StringBuilder queryBuilder = new StringBuilder("UPDATE "); + queryBuilder.append(schemaName).append(".").append(tableName); + queryBuilder.append(" SET "); + queryBuilder.append(preparedStatementSetter); + queryBuilder.append(" WHERE VanSerialNo = ? "); + + if (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment", "t_saitemmapping", + "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", + "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") + .contains(tableName.toLowerCase())) { + queryBuilder.append(" AND SyncFacilityID = ? "); + } else { + queryBuilder.append(" AND VanID = ? "); } + logger.info("Test Query Builder: {}", queryBuilder.toString()); + return queryBuilder.toString(); + } - // Helper to get information about failed records (for logging purposes) - private String getFailedRecords(int[] results, List data) { - logger.info("Inside get Failed Records"); - List failedRecordsInfo = new ArrayList<>(); - for (int k = 0; k < results.length; k++) { - // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or - // Statement.SUCCESS_NO_INFO - // usually indicates a failure or success without specific row count. - // A common return value for success is 1 (for one row updated/inserted). - if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) means failure - // Attempt to get some identifiable info from the failed record - if (data.get(k).length > 0) { - failedRecordsInfo.add( - "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); - } else { - failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); - } + // Helper to get information about failed records (for logging purposes) + private String getFailedRecords(int[] results, List data) { + logger.info("Inside get Failed Records"); + List failedRecordsInfo = new ArrayList<>(); + for (int k = 0; k < results.length; k++) { + // In Spring JDBC batchUpdate, a value of Statement.EXECUTE_FAILED or + // Statement.SUCCESS_NO_INFO + // usually indicates a failure or success without specific row count. + // A common return value for success is 1 (for one row updated/inserted). + if (results[k] < 1) { // Assuming 1 means success, and anything else (0, -2, etc.) 
means failure + // Attempt to get some identifiable info from the failed record + if (data.get(k).length > 0) { + failedRecordsInfo.add( + "Record at index " + k + " (VanSerialNo/ID: " + data.get(k)[data.get(k).length - 2] + ")"); + } else { + failedRecordsInfo.add("Record at index " + k + " (No identifiable info)"); } } - logger.info("Failed records info: {}", failedRecordsInfo); - return String.join("; ", failedRecordsInfo); } + logger.info("Failed records info: {}", failedRecordsInfo); + return String.join("; ", failedRecordsInfo); + } - - } \ No newline at end of file +} \ No newline at end of file From 24a9c01bdcdeac5dbd2d8343261d17235bf218bd Mon Sep 17 00:00:00 2001 From: Vanitha Date: Wed, 24 Sep 2025 22:14:08 +0530 Subject: [PATCH 44/45] fix: fix the error message --- .../GetDataFromVanAndSyncToDBImpl.java | 491 ++++-------------- 1 file changed, 99 insertions(+), 392 deletions(-) diff --git a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java index 56704645..e338ba17 100644 --- a/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java +++ b/src/main/java/com/iemr/mmu/service/dataSyncLayerCentral/GetDataFromVanAndSyncToDBImpl.java @@ -99,14 +99,12 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E List> dataToBesync = syncUploadDataDigester.getSyncData(); List syncResults = new ArrayList<>(); // <-- define here - logger.info("Data to be synced: {}", dataToBesync); if (syncUploadDataDigester == null || syncUploadDataDigester.getTableName() == null) { logger.error("Invalid SyncUploadDataDigester object or tableName is null."); return "Error: Invalid sync request."; } String syncTableName = syncUploadDataDigester.getTableName(); - logger.info("Syncing data for table: {}", syncTableName); // Handle specific tables first, if their logic is distinct if ("m_beneficiaryregidmapping".equalsIgnoreCase(syncTableName)) { String result = update_M_BeneficiaryRegIdMapping_for_provisioned_benID(syncUploadDataDigester, syncResults); @@ -166,11 +164,6 @@ public String syncDataToServer(String requestOBJ, String Authorization) throws E } } - // if (syncSuccess) { - // return "Overall data sync passed."; - // } else { - // return "Overall data sync failed. Details: " + errorMessage; - // } Map responseMap = new HashMap<>(); responseMap.put("statusCode", 200); responseMap.put("message", "Data sync completed"); @@ -308,316 +301,6 @@ private String getQueryFor_I_BeneficiaryDetails(String schemaName, String tableN return queryBuilder.toString(); } - /** - * Handles the generic synchronization logic for tables not covered by specific - * handlers. 
- */ - - // private boolean performGenericTableSync(SyncUploadDataDigester - // syncUploadDataDigester, List syncResults) { - // List> dataToBesync = - // syncUploadDataDigester.getSyncData(); - // List syncDataListInsert = new ArrayList<>(); - // List syncDataListUpdate = new ArrayList<>(); - // // List syncResults = new ArrayList<>(); - - // boolean overallSuccess = true; - - // if (dataToBesync == null || dataToBesync.isEmpty()) { - // logger.info("No data to sync for table: {}", - // syncUploadDataDigester.getTableName()); - // return true; // Nothing to sync, consider it a success - // } - - // String syncTableName = syncUploadDataDigester.getTableName(); - // String vanAutoIncColumnName = - // syncUploadDataDigester.getVanAutoIncColumnName(); - // String schemaName = syncUploadDataDigester.getSchemaName(); - // Integer facilityIDFromDigester = syncUploadDataDigester.getFacilityID(); - // String serverColumns = syncUploadDataDigester.getServerColumns(); - - // int vanSerialIndex = - // Arrays.asList(serverColumns.split(",")).indexOf(vanAutoIncColumnName); - - // List serverColumnsList = Arrays.asList(serverColumns.split(",")); - - // for (Map map : dataToBesync) { - // // Create a new map with clean column names as keys - // Map cleanRecord = new HashMap<>(); - // for (String key : map.keySet()) { - // String cleanKey = key; - // // Handle keys with SQL functions like date_format - // if (key.startsWith("date_format(") && key.endsWith(")")) { - // int start = key.indexOf("(") + 1; - // int end = key.indexOf(","); - // if (end > start) { - // cleanKey = key.substring(start, end).trim(); - // } else { - // // Fallback if format is unexpected - // cleanKey = key.substring(start, key.indexOf(")")).trim(); - // } - // } - // cleanRecord.put(cleanKey.trim(), map.get(key)); - // } - - // String vanSerialNo = String.valueOf(cleanRecord.get(vanAutoIncColumnName)); - // String vanID = String.valueOf(cleanRecord.get("VanID")); - // int syncFacilityID = 0; - - // // Update SyncedBy and SyncedDate in the xmap itself before processing - // cleanRecord.put("SyncedBy", syncUploadDataDigester.getSyncedBy()); - // cleanRecord.put("SyncedDate", String.valueOf(LocalDateTime.now())); - - // if (facilityIDFromDigester != null) { - // // Determine the 'Processed' status based on facility ID for specific tables - // switch (syncTableName.toLowerCase()) { - // case "t_indent": - // case "t_indentorder": { - // if (cleanRecord.containsKey("FromFacilityID") && - // cleanRecord.get("FromFacilityID") instanceof Number) { - // Number fromFacilityID = (Number) cleanRecord.get("FromFacilityID"); - // if (fromFacilityID.intValue() == facilityIDFromDigester) { - // cleanRecord.put("Processed", "P"); - // } - // } - // break; - // } - // case "t_indentissue": { - // if (cleanRecord.containsKey("ToFacilityID") && - // cleanRecord.get("ToFacilityID") instanceof Number) { - // Number toFacilityID = (Number) cleanRecord.get("ToFacilityID"); - // if (toFacilityID.intValue() == facilityIDFromDigester) { - // cleanRecord.put("Processed", "P"); - // } - // } - // break; - // } - // case "t_stocktransfer": { - // if (cleanRecord.containsKey("TransferToFacilityID") - // && cleanRecord.get("TransferToFacilityID") instanceof Number) { - // Number transferToFacilityID = (Number) - // cleanRecord.get("TransferToFacilityID"); - // if (transferToFacilityID.intValue() == facilityIDFromDigester) { - // cleanRecord.put("Processed", "P"); - // } - // } - // break; - // } - // case "t_itemstockentry": { - // if 
(cleanRecord.containsKey("FacilityID") && cleanRecord.get("FacilityID") - // instanceof Number) { - // Number mapFacilityID = (Number) cleanRecord.get("FacilityID"); - // if (mapFacilityID.intValue() == facilityIDFromDigester) { - // cleanRecord.put("Processed", "P"); - // } - // } - // break; - // } - // default: - // // No specific facility ID logic for other tables - // break; - // } - // } - - // // Extract SyncFacilityID for checkRecordIsAlreadyPresentOrNot - // if (cleanRecord.containsKey("SyncFacilityID") && - // cleanRecord.get("SyncFacilityID") instanceof Number) { - // syncFacilityID = ((Number) cleanRecord.get("SyncFacilityID")).intValue(); - // } - - // int recordCheck; - // try { - // recordCheck = dataSyncRepositoryCentral.checkRecordIsAlreadyPresentOrNot( - // schemaName, syncTableName, vanSerialNo, vanID, vanAutoIncColumnName, - // syncFacilityID); - // logger.info("Test Record check="+recordCheck); - // logger.info("Test all the data="+ schemaName +":: Tble="+ syncTableName+":: - // vanSerialNo="+vanSerialNo+":: vanID="+vanID+":: - // vanAutoIncColumnName="+vanAutoIncColumnName+":: - // syncFacilityID="+syncFacilityID); - // } catch (Exception e) { - // logger.error("Error checking record existence for table {}: VanSerialNo={}, - // VanID={}. Error: {}", - // syncTableName, vanSerialNo, vanID, e.getMessage(), e); - // return false; // Critical error, stop sync for this table - // } - - // // Prepare Object array for insert/update - // List currentRecordValues = new ArrayList<>(); - // for (String column : serverColumnsList) { - // Object value = cleanRecord.get(column.trim()); - // if (value instanceof Boolean) { - // currentRecordValues.add(value); - // } else if (value != null) { - // currentRecordValues.add(String.valueOf(value)); - // } else { - // currentRecordValues.add(null); - // } - // } - - // Object[] objArr = currentRecordValues.toArray(); - // logger.info("Test Obj Arr {}",objArr); - // if (recordCheck == 0) { - // syncDataListInsert.add(objArr); - // syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, - // syncUploadDataDigester.getSyncedBy(), true, null)); - // } else { - // // For update, append the WHERE clause parameters at the end of the array - // List updateParams = new ArrayList<>(Arrays.asList(objArr)); - // updateParams.add(String.valueOf(vanSerialNo)); - - // if (Arrays.asList("t_patientissue", "t_physicalstockentry", - // "t_stockadjustment", "t_saitemmapping", - // "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent", - // "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit") - // .contains(syncTableName.toLowerCase()) && - // cleanRecord.containsKey("SyncFacilityID")) { - // updateParams.add(String.valueOf(cleanRecord.get("SyncFacilityID"))); - // } else { - // updateParams.add(String.valueOf(vanID)); - // } - // syncDataListUpdate.add(updateParams.toArray()); - // syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, - // syncUploadDataDigester.getSyncedBy(), true, null)); - - // } - // } - - // boolean insertSuccess = true; - // boolean updateSuccess = true; - - // if (!syncDataListInsert.isEmpty()) { - // String queryInsert = getQueryToInsertDataToServerDB(schemaName, - // syncTableName, serverColumns); - // logger.info("Query Insert="+queryInsert); - // try { - // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - // syncTableName, serverColumns, queryInsert, syncDataListInsert); - // for (int k = 0; k < i.length; k++) { - // if (i[k] < 1) { - // 
syncResults.set(k, new SyncResult(schemaName, syncTableName, - // String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), // VanSerialNo - // position - // syncUploadDataDigester.getSyncedBy(), false, "Insert failed")); - // } - // } - // } - // catch (Exception e) { - // String shortReason; - // if (e.getMessage() != null) { - // if (e.getMessage().toLowerCase().contains("duplicate")) { - // shortReason = "Duplicate entry"; - // } else if (e.getMessage().toLowerCase().contains("constraint")) { - // shortReason = "Constraint violation"; - // } else if (e.getMessage().toLowerCase().contains("timeout")) { - // shortReason = "DB timeout"; - // } else { - // shortReason = "Insert/Update failed"; - // } - // } else { - // shortReason = "Unknown DB error"; - // } - // // Always add, never set, to avoid index errors - // for (int k = 0; k < syncDataListInsert.size(); k++) { - // syncResults.add(new SyncResult(schemaName, syncTableName, - // String.valueOf(syncDataListInsert.get(k)[vanSerialIndex]), - // syncUploadDataDigester.getSyncedBy(), false, shortReason)); - // } - // } - - // // try { - // // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - // syncTableName, - // // serverColumns, queryInsert, syncDataListInsert); - // // logger.info("Insert result array length: {}", i.length); - // // logger.info("Expected insert size: {}", syncDataListInsert.size()); - // // if (i.length != syncDataListInsert.size()) { - // // insertSuccess = false; - // // logger.error("Partial insert for table {}. Expected {} inserts, got {}. - // Failed records: {}", - // // syncTableName, syncDataListInsert.size(), i.length, - // // getFailedRecords(i, syncDataListInsert)); - // // } else { - // // logger.info("Successfully inserted {} records into table {}.", i.length, - // syncTableName); - // // } - // // } catch (Exception e) { - // // insertSuccess = false; - // // logger.error("Get failed records="+getFailedRecords(new int[] {}, - // syncDataListInsert)); - // // logger.error("Exception during insert for table {}: {}", syncTableName, - // e.getMessage(), e); - // // } - // } - - // if (!syncDataListUpdate.isEmpty()) { - // String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, - // serverColumns, syncTableName); - // logger.info("Query Update="+queryUpdate); - // try { - // int[] i = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - // syncTableName, serverColumns, queryUpdate, syncDataListUpdate); - // logger.info("Test Update result array length: {}", i.length); - // logger.info("Test Expected update size: {}", syncDataListUpdate.size()); - // for (int k = 0; k < i.length; k++) { - // if (i[k] < 1) { - // syncResults.set(k, new SyncResult(schemaName, syncTableName, - // String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), // VanSerialNo - // position - // syncUploadDataDigester.getSyncedBy(), false, "Update failed")); - // } - // } - // } - // catch (Exception e) { - // String shortReason; - // if (e.getMessage() != null) { - // if (e.getMessage().toLowerCase().contains("duplicate")) { - // shortReason = "Duplicate entry"; - // } else if (e.getMessage().toLowerCase().contains("constraint")) { - // shortReason = "Constraint violation"; - // } else if (e.getMessage().toLowerCase().contains("timeout")) { - // shortReason = "DB timeout"; - // } else { - // shortReason = "Insert/Update failed"; - // } - // } else { - // shortReason = "Unknown DB error"; - // } - // for (int k = 0; k < syncDataListUpdate.size(); k++) { - // syncResults.add(new SyncResult(schemaName, 
syncTableName, - // String.valueOf(syncDataListUpdate.get(k)[vanSerialIndex]), - // syncUploadDataDigester.getSyncedBy(), false, shortReason)); - // } - // } - - // // try { - // // int[] j = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, - // syncTableName, - // // SERVER_COLUMNS_NOT_REQUIRED, queryUpdate, syncDataListUpdate); - // // logger.info("Update result array length: {}", j.length); - // // logger.info("Expected update size: {}", syncDataListUpdate.size()); - // // if (j.length != syncDataListUpdate.size()) { - // // updateSuccess = false; - // // logger.error("Partial update for table {}. Expected {} updates, got {}. - // Failed records: {}", - // // syncTableName, syncDataListUpdate.size(), j.length, - // // getFailedRecords(j, syncDataListUpdate)); - // // } else { - // // logger.info("Successfully updated {} records in table {}.", j.length, - // syncTableName); - // // } - // // } catch (Exception e) { - // // updateSuccess = false; - // // logger.error("Get failed records="+getFailedRecords(new int[] {}, - // syncDataListUpdate)); - // // logger.error("Exception during update for table {}: {}", syncTableName, - // e.getMessage(), e); - // // } - // } - // logger.info("Sync results for table {}: {}", syncTableName, syncResults); - // return insertSuccess && updateSuccess; - // } - private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDigester, List syncResults) { List> dataToBesync = syncUploadDataDigester.getSyncData(); @@ -732,12 +415,11 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig logger.error("Error checking record existence for table {}: VanSerialNo={}, VanID={}. Error: {}", syncTableName, vanSerialNo, vanID, e.getMessage(), e); - // Store the complete error message from record check failure - String fullErrorMessage = "Record existence check failed: " + - (e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName() + " occurred"); + // Store the main error reason from record check failure + String mainErrorReason = "Record check failed: " + extractMainErrorReason(e); syncResults.add(new SyncResult(schemaName, syncTableName, vanSerialNo, - syncUploadDataDigester.getSyncedBy(), false, fullErrorMessage)); + syncUploadDataDigester.getSyncedBy(), false, mainErrorReason)); continue; // Skip to next record } @@ -790,7 +472,6 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig // Process INSERT operations if (!syncDataListInsert.isEmpty()) { String queryInsert = getQueryToInsertDataToServerDB(schemaName, syncTableName, serverColumns); - logger.info("Insert Query: {}", queryInsert); try { int[] insertResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName, @@ -805,18 +486,14 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig // Success - keep the existing success entry logger.info("Successfully inserted record at index {}", insertListIndex); } else { - // Failed - update the syncResults entry with detailed reason + // Failed - update the syncResults entry with concise reason String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]); - String detailedReason = "Insert operation failed. Database returned result code: " + - (insertListIndex < insertResults.length ? insertResults[insertListIndex] : "unknown") + - ". 
Result codes: 0=no rows affected (possibly due to constraint violation or duplicate key), "
-                                        +
-                                        "-2=operation failed, -3=operation succeeded but row count unknown, " +
-                                        "1=success with 1 row affected. VanSerialNo: " + vanSerialNo +
-                                        ", Table: " + syncTableName + ", Schema: " + schemaName;
+                        String conciseReason = "Insert failed (code: " +
+                                (insertListIndex < insertResults.length ? insertResults[insertListIndex] : "unknown") +
+                                ")";
 
                         syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo,
-                                syncUploadDataDigester.getSyncedBy(), false, detailedReason));
+                                syncUploadDataDigester.getSyncedBy(), false, conciseReason));
                         insertSuccess = false;
                     }
                 }
@@ -825,21 +502,17 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig
                 insertSuccess = false;
                 logger.error("Exception during insert for table {}: {}", syncTableName, e.getMessage(), e);
 
-                // Store the complete exception message and details
-                String completeErrorMessage = buildCompleteErrorMessage(e, "INSERT", syncTableName, schemaName);
+                // Store the main error reason instead of complete exception message
+                String mainErrorReason = extractMainErrorReason(e);
 
-                // Update all insert-related syncResults to failed with complete error message
+                // Update all insert-related syncResults to failed with concise error message
                 for (Map.Entry entry : insertIndexMap.entrySet()) {
                     int syncResultIndex = entry.getKey();
                     int insertListIndex = entry.getValue();
                     String vanSerialNo = String.valueOf(syncDataListInsert.get(insertListIndex)[vanSerialIndex]);
 
-                    String recordSpecificError = completeErrorMessage + " | VanSerialNo: " + vanSerialNo +
-                            " | Record Index: " + insertListIndex + " | Total Records in Batch: "
-                            + syncDataListInsert.size();
-
                     syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo,
-                            syncUploadDataDigester.getSyncedBy(), false, recordSpecificError));
+                            syncUploadDataDigester.getSyncedBy(), false, "INSERT: " + mainErrorReason));
                 }
             }
         }
@@ -847,7 +520,6 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig
         // Process UPDATE operations
         if (!syncDataListUpdate.isEmpty()) {
             String queryUpdate = getQueryToUpdateDataToServerDB(schemaName, serverColumns, syncTableName);
-            logger.info("Update Query: {}", queryUpdate);
 
             try {
                 int[] updateResults = dataSyncRepositoryCentral.syncDataToCentralDB(schemaName, syncTableName,
@@ -862,23 +534,14 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig
                         // Success - keep the existing success entry
                         logger.info("Successfully updated record at index {}", updateListIndex);
                     } else {
-                        // Failed - update the syncResults entry with detailed reason
+                        // Failed - update the syncResults entry with concise reason
                         String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]);
-                        String detailedReason = "Update operation failed. Database returned result code: " +
-                                (updateListIndex < updateResults.length ? updateResults[updateListIndex] : "unknown") +
-                                ". Result codes: 0=no rows affected (possibly record not found or no changes), " +
-                                "-2=operation failed, -3=operation succeeded but row count unknown, " +
-                                "1=success with 1 row affected. VanSerialNo: " + vanSerialNo +
-                                ", Table: " + syncTableName + ", Schema: " + schemaName +
-                                ", WHERE clause used VanSerialNo and " +
-                                (Arrays.asList("t_patientissue", "t_physicalstockentry", "t_stockadjustment",
-                                        "t_saitemmapping",
-                                        "t_stocktransfer", "t_patientreturn", "t_facilityconsumption", "t_indent",
-                                        "t_indentorder", "t_indentissue", "t_itemstockentry", "t_itemstockexit")
-                                        .contains(syncTableName.toLowerCase()) ? "SyncFacilityID" : "VanID");
+                        String conciseReason = "Update failed (code: " +
+                                (updateListIndex < updateResults.length ? updateResults[updateListIndex] : "unknown") +
+                                ")";
 
                         syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo,
-                                syncUploadDataDigester.getSyncedBy(), false, detailedReason));
+                                syncUploadDataDigester.getSyncedBy(), false, conciseReason));
                         updateSuccess = false;
                     }
                 }
@@ -887,21 +550,17 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig
                 updateSuccess = false;
                 logger.error("Exception during update for table {}: {}", syncTableName, e.getMessage(), e);
 
-                // Store the complete exception message and details
-                String completeErrorMessage = buildCompleteErrorMessage(e, "UPDATE", syncTableName, schemaName);
+                // Store the main error reason instead of complete exception message
+                String mainErrorReason = extractMainErrorReason(e);
 
-                // Update all update-related syncResults to failed with complete error message
+                // Update all update-related syncResults to failed with concise error message
                 for (Map.Entry entry : updateIndexMap.entrySet()) {
                     int syncResultIndex = entry.getKey();
                     int updateListIndex = entry.getValue();
                     String vanSerialNo = String.valueOf(syncDataListUpdate.get(updateListIndex)[vanSerialIndex]);
 
-                    String recordSpecificError = completeErrorMessage + " | VanSerialNo: " + vanSerialNo +
-                            " | Record Index: " + updateListIndex + " | Total Records in Batch: "
-                            + syncDataListUpdate.size();
-
                     syncResults.set(syncResultIndex, new SyncResult(schemaName, syncTableName, vanSerialNo,
-                            syncUploadDataDigester.getSyncedBy(), false, recordSpecificError));
+                            syncUploadDataDigester.getSyncedBy(), false, "UPDATE: " + mainErrorReason));
                 }
             }
         }
@@ -910,46 +569,94 @@ private boolean performGenericTableSync(SyncUploadDataDigester syncUploadDataDig
         return insertSuccess && updateSuccess;
     }
 
-    // Helper method to build complete error message with all available details
-    private String buildCompleteErrorMessage(Exception e, String operation, String tableName, String schemaName) {
-        StringBuilder errorMessage = new StringBuilder();
+    // Helper method to extract concise but meaningful error message
+    private String extractMainErrorReason(Exception e) {
+        if (e == null) {
+            return "Unknown error";
+        }
+
+        String message = e.getMessage();
+        if (message == null || message.trim().isEmpty()) {
+            return e.getClass().getSimpleName();
+        }
+
+        // Extract key information based on common error patterns
+        message = message.trim();
+
+        // Handle SQL constraint violations - extract the key constraint info
+        if (message.contains("Duplicate entry") && message.contains("for key")) {
+            // Extract: "Duplicate entry 'value' for key 'constraint_name'"
+            int keyStart = message.indexOf("for key '") + 9;
+            int keyEnd = message.indexOf("'", keyStart);
+            if (keyStart > 8 && keyEnd > keyStart) {
+                return "Duplicate key: " + message.substring(keyStart, keyEnd);
+            }
+            return "Duplicate entry error";
+        }
+
+        // Handle column cannot be null
+        if (message.contains("cannot be null")) {
+            int colStart = message.indexOf("Column '") + 8;
+            int colEnd = message.indexOf("'", colStart);
+            if (colStart > 7 && colEnd > colStart) {
+                return "Required field: " + message.substring(colStart, colEnd);
+            }
+            return "Required field missing";
+        }
 
-        // Basic operation info
-        errorMessage.append(operation).append(" operation failed for table: ").append(schemaName).append(".")
-                .append(tableName);
+        // Handle data too long errors
+        if (message.contains("Data too long for column")) {
+            int colStart = message.indexOf("column '") + 8;
+            int colEnd = message.indexOf("'", colStart);
+            if (colStart > 7 && colEnd > colStart) {
+                return "Data too long: " + message.substring(colStart, colEnd);
+            }
+            return "Data length exceeded";
+        }
 
-        // Exception type
-        errorMessage.append(" | Exception Type: ").append(e.getClass().getSimpleName());
+        // Handle foreign key constraint violations
+        if (message.contains("foreign key constraint")) {
+            if (message.contains("CONSTRAINT `")) {
+                int constStart = message.indexOf("CONSTRAINT `") + 12;
+                int constEnd = message.indexOf("`", constStart);
+                if (constStart > 11 && constEnd > constStart) {
+                    return "FK violation: " + message.substring(constStart, constEnd);
+                }
+            }
+            return "Foreign key constraint failed";
+        }
+
+        // Handle connection/timeout issues
+        if (message.toLowerCase().contains("timeout")) {
+            return "Database connection timeout";
+        }
 
-        // Main error message
-        if (e.getMessage() != null && !e.getMessage().trim().isEmpty()) {
-            errorMessage.append(" | Error Message: ").append(e.getMessage());
+        if (message.toLowerCase().contains("connection")) {
+            return "Database connection failed";
        }
 
-        // SQL specific details if it's a SQLException
-        if (e instanceof SQLException) {
-            SQLException sqlEx = (SQLException) e;
-            errorMessage.append(" | SQL Error Code: ").append(sqlEx.getErrorCode());
-            errorMessage.append(" | SQL State: ").append(sqlEx.getSQLState());
+        // Handle table/schema issues
+        if (message.contains("doesn't exist")) {
+            return "Table/schema not found";
        }
 
-        // Add timestamp
-        errorMessage.append(" | Error Timestamp: ").append(LocalDateTime.now());
-
-        // Cause chain if available
-        Throwable cause = e.getCause();
-        int causeLevel = 1;
-        while (cause != null && causeLevel <= 3) { // Limit to 3 levels to prevent excessive length
-            errorMessage.append(" | Cause Level ").append(causeLevel).append(": ")
-                    .append(cause.getClass().getSimpleName());
-            if (cause.getMessage() != null && !cause.getMessage().trim().isEmpty()) {
-                errorMessage.append(" - ").append(cause.getMessage());
+        // For other cases, try to get the first meaningful part of the message
+        // Split by common delimiters and take the first substantial part
+        String[] parts = message.split("[;:|]");
+        for (String part : parts) {
+            part = part.trim();
+            if (part.length() > 10 && part.length() <= 100) { // Reasonable length
+                return part;
            }
-            cause = cause.getCause();
-            causeLevel++;
        }
 
-        return errorMessage.toString();
+        // If message is short enough, return it as is
+        if (message.length() <= 150) {
+            return message;
        }
+
+        // Otherwise, truncate to first 150 characters
+        return message.substring(0, 150) + "...";
    }
 
    private String getQueryToInsertDataToServerDB(String schemaName, String tableName, String serverColumns) {
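
Note on the "(code: ...)" values embedded in the concise failure reasons above: they are the per-statement entries of the int[] returned by dataSyncRepositoryCentral.syncDataToCentralDB for the batched insert/update. Assuming that array follows standard java.sql.Statement batch semantics (the patch itself does not show the repository internals), the values can be read as sketched below; JDBC defines SUCCESS_NO_INFO as -2 and EXECUTE_FAILED as -3. The class and helper names here are purely illustrative and not part of the repository.

    import java.sql.Statement;

    public class BatchCodeDemo {
        // Illustrative mapping of one element of an executeBatch()/batchUpdate()
        // result array to a human-readable label.
        static String describeBatchCode(int code) {
            if (code > 0) {
                return code + " row(s) affected";
            }
            if (code == 0) {
                return "no rows affected";
            }
            if (code == Statement.SUCCESS_NO_INFO) { // -2
                return "succeeded, row count unknown";
            }
            if (code == Statement.EXECUTE_FAILED) { // -3
                return "statement failed";
            }
            return "unexpected code " + code;
        }

        public static void main(String[] args) {
            int[] sample = { 1, 0, Statement.SUCCESS_NO_INFO, Statement.EXECUTE_FAILED };
            for (int code : sample) {
                System.out.println(code + " -> " + describeBatchCode(code));
            }
        }
    }

In the hunks above the raw code is embedded verbatim in the "Insert failed"/"Update failed" reason, so this mapping is only a reading aid for whoever consumes the SyncResult messages.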
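
The new extractMainErrorReason helper shortens long driver messages by plain substring matching: find a fixed prefix with indexOf, then keep the identifier up to the closing quote. A minimal standalone sketch of its duplicate-key branch, using a made-up MySQL-style message (the class name and sample value are hypothetical, not taken from a real sync run):

    public class ErrorReasonDemo {
        public static void main(String[] args) {
            // Hypothetical MySQL-style driver message.
            String message = "Duplicate entry '12345-7' for key 'PRIMARY'";

            String reason = message;
            if (message.contains("Duplicate entry") && message.contains("for key")) {
                // Same extraction as the duplicate-entry branch of extractMainErrorReason:
                // keep only the identifier between "for key '" and the next quote.
                int keyStart = message.indexOf("for key '") + 9;
                int keyEnd = message.indexOf("'", keyStart);
                if (keyStart > 8 && keyEnd > keyStart) {
                    reason = "Duplicate key: " + message.substring(keyStart, keyEnd);
                } else {
                    reason = "Duplicate entry error";
                }
            }

            System.out.println(reason); // prints: Duplicate key: PRIMARY
        }
    }

The other branches (missing column, data too long, foreign key constraint) follow the same pattern with different prefixes; only the generic fallback resorts to splitting on delimiters and truncating the raw message to 150 characters.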

From 0f79647a8679a7616f1b46c76fa186fff6ea7f0e Mon Sep 17 00:00:00 2001
From: Vanitha
Date: Wed, 24 Sep 2025 22:47:24 +0530
Subject: [PATCH 45/45] fix: update the version

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index b0e8639c..aa3c19c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>com.iemr.mmu</groupId>
 	<artifactId>mmu-api</artifactId>
-	<version>3.4.0</version>
+	<version>3.4.1</version>
 	<packaging>war</packaging>
 	<name>MMU-API</name>