Commit

Merge branch 'develop' into feature/add_new_topic_array
bpathak-ons committed Mar 17, 2022
2 parents 5fb6330 + 5868cf6 commit e1479c4
Showing 8 changed files with 13 additions and 7 deletions.
2 changes: 1 addition & 1 deletion cmd/producer/main.go
@@ -72,7 +72,7 @@ func scanEvent(scanner *bufio.Scanner) *models.ContentPublished {
 
 	return &models.ContentPublished{
 		URI:      uri,
-		DataType: "Reviewed-uris",
+		DataType: "legacy",
 		TraceID:  "054435ded",
 	}
 }
2 changes: 2 additions & 0 deletions event/consumer_test.go
@@ -24,6 +24,8 @@ var testEvent = models.ContentPublished{
 	URI:          "testUri",
 	DataType:     "Thing",
 	CollectionID: "Col123",
+	JobID:        "",
+	SearchIndex:  "ONS",
 	TraceID:      "10110011100010000",
 }

2 changes: 1 addition & 1 deletion features/publish_data.feature
@@ -2,6 +2,6 @@ Feature: Data extractor should listen to the relevant topic and publish extracte
   Scenario: When searching for the extracted data I get the expected result
     Given I send a kafka event to content published topic
       | URI      | DataType      | CollectionID |
-      | some_uri | Reviewed-uris | 123          |
+      | some_uri | legacy        | 123          |
     When The kafka event is processed
     Then I should receive the published data
2 changes: 1 addition & 1 deletion features/steps/steps.go
@@ -43,7 +43,7 @@ func (c *Component) sendKafkafkaEvent(table *godog.Table) error {
 
 func (c *Component) processKafkaEvent() error {
 	c.inputData = models.ZebedeeData{
-		DataType: "Reviewed-uris",
+		DataType: "legacy",
 		Description: models.Description{
 			CDID:      "123",
 			DatasetID: "456",
4 changes: 2 additions & 2 deletions handler/handler.go
@@ -17,8 +17,8 @@ import (
 
 const (
 	OnsSearchIndex  = "ONS"
-	ZebedeeDataType = "Reviewed-uris"
-	DatasetDataType = "Dataset-uris"
+	ZebedeeDataType = "legacy"
+	DatasetDataType = "datasets"
)
 
 // ContentPublishedHandler struct to hold handle for config with zebedee, datasetAPI client and the producer
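The renamed constants are presumably what the handler uses to decide whether an incoming event should be processed via the Zebedee client or the dataset API client. A minimal sketch of that dispatch, not from this commit: the Handle signature and the handleZebedeeType/handleDatasetType helper names are assumptions for illustration only, and the fragment assumes the usual "context" and "fmt" imports.

	// Illustrative sketch only: route an event by DataType using the constants above.
	func (h *ContentPublishedHandler) Handle(ctx context.Context, event *models.ContentPublished) error {
		switch event.DataType {
		case ZebedeeDataType: // "legacy"
			return h.handleZebedeeType(ctx, event)
		case DatasetDataType: // "datasets"
			return h.handleDatasetType(ctx, event)
		default:
			return fmt.Errorf("unrecognised data type %q", event.DataType)
		}
	}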
4 changes: 2 additions & 2 deletions handler/handler_test.go
@@ -26,13 +26,13 @@ var (
 
 	testZebedeeEvent = models.ContentPublished{
 		URI:          "testZebdeeUri",
-		DataType:     "Reviewed-uris",
+		DataType:     "legacy",
 		CollectionID: "testZebdeeCollectionID",
 	}
 
 	testDatasetEvent = models.ContentPublished{
 		URI:          "/datasets/cphi01/editions/timeseries/versions/version/metadata",
-		DataType:     "Dataset-uris",
+		DataType:     "datasets",
 		CollectionID: "testDatasetApiCollectionID",
 	}

2 changes: 2 additions & 0 deletions models/event.go
@@ -5,6 +5,8 @@ type ContentPublished struct {
 	URI          string `avro:"uri"`
 	DataType     string `avro:"data_type"`
 	CollectionID string `avro:"collection_id"`
+	JobID        string `avro:"job_id"`
+	SearchIndex  string `avro:"search_index"`
 	TraceID      string `avro:"trace_id"`
 }

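With the two new fields, a fully populated event would look something like the sketch below. The field names and avro tags come from the updated model; the values are placeholders taken from elsewhere in this diff for illustration only.

	// Illustrative values only; fields match the updated ContentPublished model.
	event := models.ContentPublished{
		URI:          "some_uri",
		DataType:     "legacy",
		CollectionID: "Col123",
		JobID:        "",    // new field, serialised as "job_id"
		SearchIndex:  "ONS", // new field, serialised as "search_index"
		TraceID:      "054435ded",
	}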
2 changes: 2 additions & 0 deletions schema/schema.go
@@ -11,6 +11,8 @@ var contentPublished = `{
 		{"name": "uri", "type": "string", "default": ""},
 		{"name": "data_type", "type": "string", "default": ""},
 		{"name": "collection_id", "type": "string", "default": ""},
+		{"name": "job_id", "type": "string", "default": ""},
+		{"name": "search_index", "type": "string", "default": ""},
 		{"name": "trace_id", "type": "string", "default": ""}
 	]
 }`
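Because the Avro schema is declared as a raw JSON string, one quick way to check that its field names stay in step with the struct's avro tags is to parse the string and list the names. A standalone sketch using only the standard library; the schema here is trimmed to just the "fields" array shown above, so the record wrapper is omitted rather than guessed at.

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// Trimmed copy of the schema's field list, for illustration only.
	const contentPublished = `{
		"fields": [
			{"name": "uri", "type": "string", "default": ""},
			{"name": "data_type", "type": "string", "default": ""},
			{"name": "collection_id", "type": "string", "default": ""},
			{"name": "job_id", "type": "string", "default": ""},
			{"name": "search_index", "type": "string", "default": ""},
			{"name": "trace_id", "type": "string", "default": ""}
		]
	}`

	func main() {
		var schema struct {
			Fields []struct {
				Name string `json:"name"`
			} `json:"fields"`
		}
		if err := json.Unmarshal([]byte(contentPublished), &schema); err != nil {
			panic(err)
		}
		// Expect: uri, data_type, collection_id, job_id, search_index, trace_id
		for _, f := range schema.Fields {
			fmt.Println(f.Name)
		}
	}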
