Skip to content

Commit

Permalink
Remove duplicated S3 path logic by extracting a shared ExportS3Path helper
Browse files Browse the repository at this point in the history
  • Loading branch information
pvighi committed Nov 28, 2018
1 parent f641215 commit ae0696c
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 31 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import com.gu.sf_datalake_export.handlers.DownloadBatchHandler.WireState
import com.gu.sf_datalake_export.handlers.StartJobHandler.ShouldUploadToDataLake
import com.gu.sf_datalake_export.salesforce_bulk_api.BulkApiParams.ObjectName
import com.gu.sf_datalake_export.salesforce_bulk_api.GetBatchResult.JobName
import com.gu.sf_datalake_export.util.ExportS3Path
import com.gu.util.Logging
import com.gu.util.apigateway.ApiGatewayHandler.LambdaIO
import com.gu.util.config.Stage
Expand All @@ -23,7 +24,7 @@ object CleanBucketHandler extends Logging {
val objectName = ObjectName(state.objectName)
val jobName = JobName(state.jobName)
val shouldUploadToDataLake = ShouldUploadToDataLake(state.uploadToDataLake)
val wiredS3PathFor = s3PathFor(RawEffects.stage) _
val wiredS3PathFor = ExportS3Path(RawEffects.stage) _

cleanBucket(
wiredS3PathFor,
Expand Down Expand Up @@ -67,10 +68,4 @@ object CleanBucketHandler extends Logging {
} yield ()
}

//TODO THIS CODE IS DUPLICATED HERE AND IN THE DOWNLOADER!!
def s3PathFor(stage: Stage)(objectName: ObjectName, uploadToDataLake: ShouldUploadToDataLake): S3Path = stage match {
case Stage("PROD") if uploadToDataLake.value => S3Path(BucketName(s"ophan-raw-salesforce-customer-data-${objectName.value.toLowerCase}"), None)
case Stage(stageName) => S3Path(BucketName(s"gu-salesforce-export-${stageName.toLowerCase}"), None)
}

}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package com.gu.sf_datalake_export.handlers

import java.io.{InputStream, OutputStream}

import com.amazonaws.services.lambda.runtime.Context
import com.gu.effects._
import com.gu.salesforce.SalesforceAuthenticate.{SFAuthConfig, SFExportAuthConfig}
Expand All @@ -13,6 +14,7 @@ import com.gu.sf_datalake_export.salesforce_bulk_api.GetBatchResultId.{BatchResu
import com.gu.sf_datalake_export.salesforce_bulk_api.GetJobBatches._
import com.gu.sf_datalake_export.salesforce_bulk_api.S3UploadFile.{File, FileContent, FileName}
import com.gu.sf_datalake_export.salesforce_bulk_api.{GetBatchResult, GetBatchResultId, S3UploadFile}
import com.gu.sf_datalake_export.util.ExportS3Path
import com.gu.util.apigateway.ApiGatewayHandler.LambdaIO
import com.gu.util.config.{LoadConfigModule, Stage}
import com.gu.util.handlers.JsonHandler
Expand Down Expand Up @@ -100,7 +102,7 @@ object DownloadBatchHandler {
wiredGetBatchResultId = sfClient.wrapWith(GetBatchResultId.wrapper).runRequest _
wiredGetBatchResult = sfClient.wrapWith(GetBatchResult.wrapper).runRequest _
uploadFile = S3UploadFile(RawEffects.s3Write) _
wiredBasePathFor = uploadBasePath(stage) _
wiredBasePathFor = ExportS3Path(stage) _
wiredDownloadBatch = download(
uploadFile,
wiredGetBatchResultId,
Expand Down Expand Up @@ -143,10 +145,7 @@ object DownloadBatchHandler {
} yield ()
}

def uploadBasePath(stage: Stage)(objectName: ObjectName, uploadToDataLake: ShouldUploadToDataLake): S3Path = stage match {
case Stage("PROD") if uploadToDataLake.value => S3Path(BucketName(s"ophan-raw-salesforce-customer-data-${objectName.value.toLowerCase}"), None)
case Stage(stageName) => S3Path(BucketName(s"gu-salesforce-export-${stageName.toLowerCase}"), None)
}


case class ShouldCleanBucket(value: Boolean) extends AnyVal

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package com.gu.sf_datalake_export.util

import com.gu.effects.{BucketName, S3Path}
import com.gu.sf_datalake_export.handlers.StartJobHandler.ShouldUploadToDataLake
import com.gu.sf_datalake_export.salesforce_bulk_api.BulkApiParams.ObjectName
import com.gu.util.config.Stage

/**
 * Resolves the destination S3 path for a Salesforce export, shared by the
 * clean-bucket and download-batch handlers (extracted to remove duplication).
 *
 * PROD exports flagged for the data lake land in the per-object ophan raw
 * bucket; everything else goes to the stage-specific gu-salesforce-export bucket.
 */
object ExportS3Path {
  def apply(stage: Stage)(objectName: ObjectName, uploadToDataLake: ShouldUploadToDataLake): S3Path = {
    val Stage(stageName) = stage
    val bucket =
      if (stageName == "PROD" && uploadToDataLake.value)
        BucketName(s"ophan-raw-salesforce-customer-data-${objectName.value.toLowerCase}")
      else
        BucketName(s"gu-salesforce-export-${stageName.toLowerCase}")
    // No key prefix: exports are written at the bucket root.
    S3Path(bucket, None)
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
package com.gu.sf_datalake_export

import com.gu.effects.{BucketName, S3Path}
import com.gu.sf_datalake_export.handlers.StartJobHandler.ShouldUploadToDataLake
import com.gu.sf_datalake_export.salesforce_bulk_api.BulkApiParams
import com.gu.sf_datalake_export.util.ExportS3Path
import com.gu.util.config.Stage
import org.scalatest.{FlatSpec, Matchers}

class ExportS3PathTest extends FlatSpec with Matchers {

  // Spec label fixed: this suite exercises ExportS3Path, not the old uploadBasePath.
  "ExportS3Path" should "return ophan bucket basepath for PROD requests with uploadToDataLake enabled" in {
    val contactName = BulkApiParams.contact.objectName
    val actualBasePath = ExportS3Path(Stage("PROD"))(contactName, ShouldUploadToDataLake(true))
    actualBasePath shouldBe S3Path(BucketName("ophan-raw-salesforce-customer-data-contact"), None)
  }

  it should "return test bucket basepath for PROD requests with uploadToDataLake disabled" in {
    val contactName = BulkApiParams.contact.objectName
    val actualBasePath = ExportS3Path(Stage("PROD"))(contactName, ShouldUploadToDataLake(false))
    actualBasePath shouldBe S3Path(BucketName("gu-salesforce-export-prod"), None)
  }

  it should "return test bucket basepath for non PROD requests regardless of the uploadToDataLake param" in {
    val contactName = BulkApiParams.contact.objectName
    val codeBasePath = ExportS3Path(Stage("CODE"))(contactName, ShouldUploadToDataLake(false))
    // BUG FIX: both values used ShouldUploadToDataLake(false), so the
    // "regardless of the param" claim was never actually exercised.
    val codeBasePathUploadToDl = ExportS3Path(Stage("CODE"))(contactName, ShouldUploadToDataLake(true))
    List(codeBasePath, codeBasePathUploadToDl).distinct shouldBe List(S3Path(BucketName("gu-salesforce-export-code"), None))
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -104,25 +104,6 @@ class DownloadBatchHandlerTest extends FlatSpec with Matchers {
wiredDownloadBatch(JobName("someJobName"), JobId("someJobId"), BatchId("someBatchId"), testBasePath) shouldBe Success(())
}

"uploadBasePath" should "return ophan bucket basepath for PROD requests with uploadToDataLake enabled" in {
val contactName = BulkApiParams.contact.objectName
val actualBasePath = DownloadBatchHandler.uploadBasePath(Stage("PROD"))(contactName, ShouldUploadToDataLake(true))
actualBasePath shouldBe S3Path(BucketName("ophan-raw-salesforce-customer-data-contact"), None)
}

it should "return test bucket basepath for PROD requests with uploadToDataLake disabled" in {
val contactName = BulkApiParams.contact.objectName
val actualBasePath = DownloadBatchHandler.uploadBasePath(Stage("PROD"))(contactName, ShouldUploadToDataLake(false))
actualBasePath shouldBe S3Path(BucketName("gu-salesforce-export-prod"), None)
}

it should "return test bucket basepath for non PROD requests regardless of the uploadToDataLake param" in {
val contactName = BulkApiParams.contact.objectName
val codeBasePath = DownloadBatchHandler.uploadBasePath(Stage("CODE"))(contactName, ShouldUploadToDataLake(false))
val codeBasePathUploadToDl = DownloadBatchHandler.uploadBasePath(Stage("CODE"))(contactName, ShouldUploadToDataLake(false))
List(codeBasePath, codeBasePathUploadToDl).distinct shouldBe List(S3Path(BucketName("gu-salesforce-export-code"), None))
}

val wireBatch1 = WireBatchInfo(batchId = "batch1", state = "Completed")
val wireBatch2 = WireBatchInfo(batchId = "batch2", state = "Completed")

Expand Down

0 comments on commit ae0696c

Please sign in to comment.