Multiple Import Metadata actions #1178

Merged: 18 commits, Jun 26, 2024
Changes from 9 commits
132 changes: 127 additions & 5 deletions src/main/java/com/autotune/analyzer/services/DSMetadataService.java
@@ -16,13 +16,16 @@

package com.autotune.analyzer.services;

import com.autotune.analyzer.exceptions.KruizeResponse;
import com.autotune.analyzer.serviceObjects.DSMetadataAPIObject;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
import com.autotune.analyzer.utils.GsonUTCDateAdapter;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.dataSourceMetadata.DataSourceMetadataInfo;
import com.autotune.common.datasource.DataSourceInfo;
import com.autotune.common.datasource.DataSourceManager;
import com.autotune.database.dao.ExperimentDAOImpl;
import com.autotune.database.service.ExperimentDBService;
import com.autotune.utils.KruizeSupportedTypes;
import com.autotune.utils.MetricsConfig;
@@ -38,10 +41,8 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.io.PrintWriter;
import java.util.*;
import java.util.stream.Collectors;

import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.CHARACTER_ENCODING;
@@ -90,8 +91,25 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)

DataSourceInfo datasource = new ExperimentDBService().loadDataSourceFromDBByName(dataSourceName);
if(null != datasource) {
new DataSourceManager().importMetadataFromDataSource(datasource);
DataSourceMetadataInfo dataSourceMetadata = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false");
if (null != dataSourceMetadata) {
Contributor: Move all of the code that handles the doPost to the relevant files under datasource.

ValidationOutputData validationOutputData = new ExperimentDAOImpl().deleteKruizeDSMetadataEntryByName(dataSourceName);

if (validationOutputData.isSuccess()) {
new DataSourceManager().deleteMetadataFromDataSource(datasource);
} else {
String errorMessage = validationOutputData.getMessage();
sendErrorResponse(
response,
new Exception(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.MISSING_DATASOURCE_METADATA_EXCPTN),
HttpServletResponse.SC_BAD_REQUEST,
errorMessage
);
}
}

new DataSourceManager().importMetadataFromDataSource(datasource);
Contributor: As mentioned in the review, this can cause DB corruption if the import fails for any reason, since we would already have removed the DB content. Please reverse the order and add validation of the import results.

Contributor (Author): Updated the code flow, including validation of the imported DataSourceMetadataInfo object.

dataSourceMetadata = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false");
dataSourceMetadataMap.put(dataSourceName,dataSourceMetadata);
}
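
To make the reviewer's concern concrete: in the flow above, deleteKruizeDSMetadataEntryByName runs before the fresh import has been validated, so a failed import would leave no stored metadata for that datasource. Below is a minimal sketch of the reversed order the review asks for. It assumes a hypothetical helper importAndValidate(...) that builds and validates the new DataSourceMetadataInfo without persisting it (the importMetadataFromDataSource method in this PR returns void), so it illustrates the suggestion rather than the merged implementation.

// Sketch of the review suggestion (not the merged code): create and validate the new
// metadata first, and only then replace what is stored for this datasource.
DataSourceMetadataInfo freshMetadata = importAndValidate(datasource); // hypothetical helper

if (null == freshMetadata) {
    // Import failed or produced invalid metadata: existing DB rows stay untouched.
    sendErrorResponse(
            response,
            null,
            HttpServletResponse.SC_BAD_REQUEST,
            String.format(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_IMPORT_ERROR_MSG, dataSourceName)
    );
    return;
}

// The new metadata is known to be good, so dropping the old rows can no longer corrupt the DB.
ValidationOutputData deleteResult = new ExperimentDAOImpl().deleteKruizeDSMetadataEntryByName(dataSourceName);
if (deleteResult.isSuccess()) {
    // persist freshMetadata (e.g. via the existing save-to-DB path) and return it
    dataSourceMetadataMap.put(dataSourceName, freshMetadata);
}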

@@ -283,4 +301,108 @@ private Gson createGsonObject() {
private boolean isValidBooleanValue(String value) {
return value != null && (value.equals("true") || value.equals("false"));
}

/**
* TODO temp solution to delete metadata, Need to evaluate use cases
*
* @param request
* @param response
* @throws ServletException
* @throws IOException
*/
@Override
protected void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
HashMap<String, DataSourceMetadataInfo> dataSourceMetadataMap = new HashMap<>();
String inputData = "";
try {
// Set the character encoding of the request to UTF-8
request.setCharacterEncoding(CHARACTER_ENCODING);

inputData = request.getReader().lines().collect(Collectors.joining());
if (null == inputData || inputData.isEmpty()) {
throw new Exception("Request input data cannot be null or empty");
Contributor: Move all of the strings to string constants.

Contributor (Author): Done.

}
DSMetadataAPIObject metadataAPIObject = new Gson().fromJson(inputData, DSMetadataAPIObject.class);
metadataAPIObject.validateInputFields();
String dataSourceName = metadataAPIObject.getDataSourceName();

if (null == dataSourceName || dataSourceName.isEmpty()) {
sendErrorResponse(
inputData,
response,
null,
HttpServletResponse.SC_BAD_REQUEST,
AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_NAME_MANDATORY);
}

DataSourceInfo datasource = new ExperimentDBService().loadDataSourceFromDBByName(dataSourceName);

if (null == datasource) {
sendErrorResponse(
inputData,
response,
new Exception(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.INVALID_DATASOURCE_NAME_METADATA_EXCPTN),
HttpServletResponse.SC_BAD_REQUEST,
String.format(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_DELETE_ERROR_MSG, dataSourceName)
);
}

try {
DataSourceMetadataInfo dataSourceMetadata = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false");
if (null == dataSourceMetadata) {
sendErrorResponse(
inputData,
response,
new Exception(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.MISSING_DATASOURCE_METADATA_EXCPTN),
HttpServletResponse.SC_BAD_REQUEST,
String.format(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_DELETE_ERROR_MSG, dataSourceName)
);
}
dataSourceMetadataMap.put(dataSourceName, dataSourceMetadata);

if (!dataSourceMetadataMap.isEmpty() && dataSourceMetadataMap.containsKey(dataSourceName)) {
ValidationOutputData validationOutputData = new ExperimentDAOImpl().deleteKruizeDSMetadataEntryByName(dataSourceName);
if (validationOutputData.isSuccess()) {
new DataSourceManager().deleteMetadataFromDataSource(datasource);
dataSourceMetadataMap.remove(dataSourceName);
} else {
sendErrorResponse(
inputData,
response,
new Exception(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_DELETE_EXCPTN),
HttpServletResponse.SC_BAD_REQUEST,
validationOutputData.getMessage()
);
}
} else {
sendErrorResponse(
inputData,
response,
new Exception(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_DELETE_EXCPTN),
HttpServletResponse.SC_BAD_REQUEST,
String.format(AnalyzerErrorConstants.APIErrors.DSMetadataAPI.DATASOURCE_METADATA_DELETE_ERROR_MSG, dataSourceName)
);
}
sendSuccessResponse(response, "Datasource metadata deleted successfully.");
} catch (Exception e) {
LOGGER.error("Loading saved metadata for datasource {} failed: {} ", dataSourceName, e.getMessage());
}

} catch (Exception e) {
sendErrorResponse(inputData, response, e, HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
}
}

private void sendSuccessResponse(HttpServletResponse response, String message) throws IOException {
response.setContentType(JSON_CONTENT_TYPE);
response.setCharacterEncoding(CHARACTER_ENCODING);
response.setStatus(HttpServletResponse.SC_CREATED);
PrintWriter out = response.getWriter();
out.append(
new Gson().toJson(
new KruizeResponse(message + " View imported metadata at GET /dsmetadata", HttpServletResponse.SC_CREATED, "", "SUCCESS")
)
);
out.flush();
}
}
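
A quick client-side illustration of the new doDelete handler: the sketch below issues a DELETE with the datasource name in the JSON body. The http://localhost:8080/dsmetadata URL and the "version"/"datasource_name" field names are assumptions inferred from the success message and DSMetadataAPIObject, not confirmed by this diff, so verify them against the actual Kruize API documentation.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteDSMetadataExample {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint and request body; the servlet parses the JSON into a DSMetadataAPIObject.
        String body = "{\"version\": \"v1.0\", \"datasource_name\": \"prometheus-1\"}";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/dsmetadata"))
                .header("Content-Type", "application/json")
                .method("DELETE", HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // On success the servlet responds with SC_CREATED (201) and a KruizeResponse JSON payload.
        System.out.println(response.statusCode() + " " + response.body());
    }
}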
@@ -317,7 +317,6 @@ public static final class ServiceConstants {
public static final String VERBOSE = "verbose";
public static final String FALSE = "false";


private ServiceConstants() {
}
}
@@ -186,6 +186,8 @@ private DSMetadataAPI(){
public static final String MISSING_DATASOURCE_METADATA_MSG = "Metadata for a given datasource - \" %s \", cluster name - \" %s \", namespace - \"%s \" " +
"either does not exist or is not valid";
public static final String DATASOURCE_METADATA_IMPORT_ERROR_MSG = "Metadata cannot be imported for datasource - \" %s \" , either does not exist or is not valid";
public static final String DATASOURCE_METADATA_DELETE_EXCPTN = "Datasource metadata not found";
public static final String DATASOURCE_METADATA_DELETE_ERROR_MSG = "Metadata cannot be deleted for datasource - \" %s \" , either does not exist or is not valid";
public static final String INVALID_QUERY_PARAM = "The query param(s) - \" %s \" is/are invalid";
public static final String INVALID_QUERY_PARAM_VALUE = "The query param value(s) is/are invalid";
}
@@ -33,10 +33,6 @@ public void importMetadataFromDataSource(DataSourceInfo dataSourceInfo) {
if (null == dataSourceInfo) {
throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO);
}
String dataSourceName = dataSourceInfo.getName();
if(checkIfDataSourceMetadataExists(dataSourceName)) {
return;
}
dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo);
// save the metadata to DB
saveMetadataFromDataSourceToDB(dataSourceInfo);
5 changes: 2 additions & 3 deletions src/main/java/com/autotune/database/dao/ExperimentDAO.java
@@ -91,7 +91,6 @@ public interface ExperimentDAO {
// add metadata
ValidationOutputData addMetadataToDB(KruizeDSMetadataEntry kruizeDSMetadataEntry);

// Load metadata
List<KruizeDSMetadataEntry> loadMetadata() throws Exception;

// Delete metadata
public ValidationOutputData deleteKruizeDSMetadataEntryByName(String dataSourceName);
Contributor (@msvinaykumar, May 13, 2024): Perhaps you could name it something like "removeClusterMetadataByName" or …

Contributor (Author): Have followed the same naming convention as deleteKruizeExperimentEntryByName; please confirm if this still requires renaming.

}
50 changes: 38 additions & 12 deletions src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
@@ -500,6 +500,44 @@ public ValidationOutputData deleteKruizeExperimentEntryByName(String experimentName
return validationOutputData;
}

/**
* Delete metadata with the name dataSourceName
* This deletes the metadata from the KruizeDSMetadataEntry table
* @param dataSourceName
* @return
*/
@Override
public ValidationOutputData deleteKruizeDSMetadataEntryByName(String dataSourceName) {
ValidationOutputData validationOutputData = new ValidationOutputData(false, null, null);
Transaction tx = null;
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
try {
tx = session.beginTransaction();
Query query = session.createQuery(DELETE_FROM_METADATA_BY_DATASOURCE_NAME, null);
query.setParameter("dataSourceName", dataSourceName);
int deletedCount = query.executeUpdate();

if (deletedCount == 0) {
validationOutputData.setSuccess(false);
validationOutputData.setMessage("KruizeDSMetadataEntry not found with datasource name: " + dataSourceName);
} else {
validationOutputData.setSuccess(true);
}
tx.commit();
} catch (HibernateException e) {
LOGGER.error("Not able to delete metadata for datasource {} due to {}", dataSourceName, e.getMessage());
if (tx != null) tx.rollback();
e.printStackTrace();
validationOutputData.setSuccess(false);
validationOutputData.setMessage(e.getMessage());
//todo save error to API_ERROR_LOG
}
} catch (Exception e) {
LOGGER.error("Not able to delete metadata for datasource {} due to {}", dataSourceName, e.getMessage());
}
return validationOutputData;
}

@Override
public List<KruizeExperimentEntry> loadAllExperiments() throws Exception {
//todo load only experimentStatus=inprogress , playback may not require completed experiments
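
A small usage sketch of the new DAO method (hypothetical caller, not part of this PR), mirroring how DSMetadataService branches on the returned ValidationOutputData; the datasource name is a placeholder.

// Hypothetical caller: remove stored metadata for one datasource and branch on the outcome.
ValidationOutputData result = new ExperimentDAOImpl().deleteKruizeDSMetadataEntryByName("prometheus-1");

if (result.isSuccess()) {
    // All KruizeDSMetadataEntry rows for "prometheus-1" were deleted in a single transaction.
    LOGGER.info("Metadata deleted for datasource {}", "prometheus-1");
} else {
    // e.g. "KruizeDSMetadataEntry not found with datasource name: prometheus-1"
    LOGGER.warn("Metadata delete failed: {}", result.getMessage());
}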
@@ -876,18 +914,6 @@ public List<KruizeDSMetadataEntry> loadMetadataByNamespace(String dataSourceName
return kruizeMetadataList;
}

@Override
public List<KruizeDSMetadataEntry> loadMetadata() throws Exception {
List<KruizeDSMetadataEntry> kruizeMetadataList;
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
kruizeMetadataList = session.createQuery(SELECT_FROM_METADATA, KruizeDSMetadataEntry.class).list();
} catch (Exception e) {
LOGGER.error("Unable to load metadata : {}", e.getMessage());
throw new Exception("Error while loading existing metadata object from database : " + e.getMessage());
}
return kruizeMetadataList;
}

/**
* @param name
* @return single element list of datasource after fetching from the DB
@@ -11,7 +11,6 @@ public static final class SQLQUERY {
public static final String SELECT_FROM_RESULTS_BY_EXP_NAME = "from KruizeResultsEntry k WHERE k.experiment_name = :experimentName";
public static final String SELECT_FROM_DATASOURCE = "from KruizeDataSourceEntry";
public static final String SELECT_FROM_DATASOURCE_BY_NAME = "from KruizeDataSourceEntry kd WHERE kd.name = :name";
public static final String SELECT_FROM_METADATA = "from KruizeDSMetadataEntry";
public static final String SELECT_FROM_METADATA_BY_DATASOURCE_NAME = "from KruizeDSMetadataEntry km WHERE km.datasource_name = :dataSourceName";
public static final String SELECT_FROM_METADATA_BY_DATASOURCE_NAME_AND_CLUSTER_NAME =
String.format("from KruizeDSMetadataEntry km " +
@@ -61,6 +60,7 @@ public static final class SQLQUERY {
public static final String DELETE_FROM_EXPERIMENTS_BY_EXP_NAME = "DELETE FROM KruizeExperimentEntry k WHERE k.experiment_name = :experimentName";
public static final String DELETE_FROM_RESULTS_BY_EXP_NAME = "DELETE FROM KruizeResultsEntry k WHERE k.experiment_name = :experimentName";
public static final String DELETE_FROM_RECOMMENDATIONS_BY_EXP_NAME = "DELETE FROM KruizeRecommendationEntry k WHERE k.experiment_name = :experimentName";
public static final String DELETE_FROM_METADATA_BY_DATASOURCE_NAME = "DELETE FROM KruizeDSMetadataEntry km WHERE km.datasource_name = :dataSourceName";
public static final String DB_PARTITION_DATERANGE = "CREATE TABLE IF NOT EXISTS %s_%s%s%s PARTITION OF %s FOR VALUES FROM ('%s-%s-%s 00:00:00.000') TO ('%s-%s-%s 23:59:59');";
public static final String SELECT_ALL_KRUIZE_TABLES = "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' " +
"and (table_name like 'kruize_results_%' or table_name like 'kruize_recommendations_%') ";