EN-12742 Add capability to disable a secondary instance from reading … #199

Merged
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
+                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
+                                       http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd">
+  <changeSet author="chi" id="20161231-add-disabled">
+    <preConditions onFail="MARK_RAN">
+      <not><columnExists tableName="dataset_internal_name_map" columnName="disabled"/></not>
+    </preConditions>
+    <addColumn tableName="dataset_internal_name_map">
+      <column name="disabled" type="timestamp with timezone"/>
+    </addColumn>
+  </changeSet>
+</databaseChangeLog>
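For context, the new `disabled` column is a nullable timestamp on `dataset_internal_name_map`: NULL means reads proceed as before, while any non-null value marks the mapping as disabled. This PR does not include the code that sets the flag, so the snippet below is only an illustrative sketch of how an operator-side helper might toggle it over plain JDBC; the object name, method name, and the use of `now()` are assumptions, not part of this change.

import java.sql.Connection

// Hypothetical helper, not part of this PR: sets or clears the `disabled`
// timestamp for a dataset's internal-name mapping.
object DisableDatasetSketch {
  def setDisabled(conn: Connection, internalName: String, disable: Boolean): Int = {
    val sql =
      if (disable)
        "UPDATE dataset_internal_name_map SET disabled = now() WHERE dataset_internal_name = ?"
      else
        "UPDATE dataset_internal_name_map SET disabled = NULL WHERE dataset_internal_name = ?"
    val stmt = conn.prepareStatement(sql)
    try {
      stmt.setString(1, internalName)
      stmt.executeUpdate() // number of rows touched; 0 if the mapping does not exist
    } finally {
      stmt.close()
    }
  }
}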
@@ -12,5 +12,6 @@
  <include file="com/socrata/pg/store/schema/20160107-create-computation-strategy-map.xml"/>
  <include file="com/socrata/pg/store/schema/20160111-add-field-name-columns.xml"/>
  <include file="com/socrata/pg/store/schema/20160420-add-copy-map-table-modifiers.xml"/>
+  <include file="com/socrata/pg/store/schema/20161231-add-disabled.xml"/>

</databaseChangeLog>
@@ -6,16 +6,20 @@ import com.socrata.datacoordinator.id.DatasetId

class PGSecondaryDatasetMapReader(val conn: Connection) {
  val idFromName =
-    """SELECT dataset_system_id
+    """SELECT dataset_system_id, disabled
      | FROM dataset_internal_name_map
      | WHERE dataset_internal_name = ?
    """.stripMargin
-  def datasetIdForInternalName(datasetInternalName: String): Option[DatasetId] = {
+  def datasetIdForInternalName(datasetInternalName: String, checkDisabled: Boolean = false): Option[DatasetId] = {
    using(conn.prepareStatement(idFromName)) { stmt =>
      stmt.setString(1, datasetInternalName)
      using(stmt.executeQuery()) { rs =>
        if (rs.next()) {
-          Option(new DatasetId(rs.getLong("dataset_system_id")))
+          if (checkDisabled && rs.getDate("disabled") != null) {
+            None
+          } else {
+            Option(new DatasetId(rs.getLong("dataset_system_id")))
+          }
        } else {
          None
        }
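To make the reader's new behavior concrete, here is a small usage sketch (assuming a `java.sql.Connection` named `conn` is in scope; the internal name "alpha.123" is illustrative, not taken from this PR): the default call keeps the old semantics, while `checkDisabled = true` hides mappings whose `disabled` timestamp is set.

val reader = new PGSecondaryDatasetMapReader(conn)

// Old behavior (default checkDisabled = false): resolves the hypothetical
// internal name even if its mapping has been disabled.
val idIgnoringFlag: Option[DatasetId] = reader.datasetIdForInternalName("alpha.123")

// New behavior: returns None when the `disabled` timestamp is non-null,
// so callers such as QueryServer treat the dataset as not found.
val idHonoringFlag: Option[DatasetId] = reader.datasetIdForInternalName("alpha.123", checkDisabled = true)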
@@ -188,7 +188,7 @@ class QueryServer(val dsInfo: DSInfo, val caseSensitivity: CaseSensitivity) exte
    queryTimeout: Option[Duration]
  ) (resp:HttpServletResponse): Unit = {
    withPgu(dsInfo, truthStoreDatasetInfo = None) { pgu =>
-      pgu.secondaryDatasetMapReader.datasetIdForInternalName(datasetId) match {
+      pgu.secondaryDatasetMapReader.datasetIdForInternalName(datasetId, checkDisabled = true) match {
        case None =>
          logger.info(s"Tried to perform query on dataset $datasetId")
          NotFound(resp)
@@ -392,7 +392,7 @@ class QueryServer(val dsInfo: DSInfo, val caseSensitivity: CaseSensitivity) exte
  def getRollups(ds: String, reqCopy: Option[String], includeUnmaterialized: Boolean): Option[Iterable[RollupInfo]] = {
    withPgu(dsInfo, truthStoreDatasetInfo = None) { pgu =>
      for {
-        datasetId <- pgu.secondaryDatasetMapReader.datasetIdForInternalName(ds)
+        datasetId <- pgu.secondaryDatasetMapReader.datasetIdForInternalName(ds, checkDisabled = true)
        datasetInfo <- pgu.datasetMapReader.datasetInfo(datasetId)
      } yield {
        val copy = getCopy(pgu, datasetInfo, reqCopy)
@@ -412,7 +412,7 @@ class QueryServer(val dsInfo: DSInfo, val caseSensitivity: CaseSensitivity) exte
  private def getCopy(pgu: PGSecondaryUniverse[SoQLType, SoQLValue], ds: String, reqCopy: Option[String])
    : Option[CopyInfo] = {
    for {
-      datasetId <- pgu.secondaryDatasetMapReader.datasetIdForInternalName(ds)
+      datasetId <- pgu.secondaryDatasetMapReader.datasetIdForInternalName(ds, checkDisabled = true)
      datasetInfo <- pgu.datasetMapReader.datasetInfo(datasetId)
    } yield {
      getCopy(pgu, datasetInfo, reqCopy)