Commit
Merge 00e4c44 into 21e1772
wzorgdrager committed Apr 1, 2019
2 parents 21e1772 + 00e4c44 commit 963aa95
Showing 25 changed files with 2,311 additions and 23 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -66,7 +66,7 @@ In order to run the documentation locally, make sure to have [Ruby and Rubygem](
By default the documentation will be served at [http://127.0.0.1:4000](http://127.0.0.1:4000)

## Use orchestration tools
The orchestration tools are located under [`/tools`](/tools). They depends on:
The orchestration tools are located under [`/tools`](/tools). They depend on:
- Python
- Docker
- Redis
28 changes: 16 additions & 12 deletions build.sbt
@@ -46,7 +46,8 @@ lazy val root = (project in file("."))
pluginMongodb,
pluginElasticSearch,
pluginGitHub,
pluginRabbitMQ)
pluginRabbitMQ,
pluginGHTorrent)

lazy val core = (project in file("codefeedr-core"))
.settings(
@@ -100,9 +101,7 @@ lazy val pluginMongodb = (project in file("codefeedr-plugins/codefeedr-mongodb")
dependencies.embeddedMongo
)
)
.dependsOn(
core
)
.dependsOn(core)

lazy val pluginElasticSearch = (project in file("codefeedr-plugins/codefeedr-elasticsearch"))
.settings(
@@ -113,9 +112,7 @@ lazy val pluginElasticSearch = (project in file("codefeedr-plugins/codefeedr-ela
dependencies.flinkElasticSearch
)
)
.dependsOn(
core
)
.dependsOn(core)

lazy val pluginGitHub = (project in file("codefeedr-plugins/codefeedr-github"))
.settings(
@@ -130,9 +127,7 @@ lazy val pluginGitHub = (project in file("codefeedr-plugins/codefeedr-github"))
dependencies.json4sExt
)
)
.dependsOn(
core
)
.dependsOn(core)

lazy val pluginRabbitMQ = (project in file("codefeedr-plugins/codefeedr-rabbitmq"))
.settings(
@@ -144,9 +139,18 @@ lazy val pluginRabbitMQ = (project in file("codefeedr-plugins/codefeedr-rabbitmq
//dependencies.embeddedRabbitMQ
)
)
.dependsOn(core
)
.dependsOn(core)

lazy val pluginGHTorrent = (project in file("codefeedr-plugins/codefeedr-ghtorrent"))
.settings(
name := pluginPrefix + "ghtorrent",
settings,
assemblySettings,
libraryDependencies ++= commonDependencies ++ Seq(
dependencies.flinkRabbitMQ,
//dependencies.embeddedRabbitMQ
)
).dependsOn(core)

lazy val dependencies =
new {
Properties.scala
@@ -19,6 +19,7 @@
package org.codefeedr

import scala.language.implicitConversions
import scala.collection.JavaConverters._

/** Implicit conversions for Properties. */
object Properties {
KafkaBuffer.scala
@@ -29,7 +29,6 @@ import org.apache.flink.streaming.connectors.kafka.{
}
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig, NewTopic}
import org.apache.logging.log4j.scala.Logging
import org.codefeedr.Properties._
import org.codefeedr.buffer.serialization.schema_exposure.{
RedisSchemaExposer,
SchemaExposer,
@@ -40,6 +39,7 @@ import org.codefeedr.pipeline.Pipeline
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
import org.codefeedr.Properties._

/** Holds Kafka property names. */
object KafkaBuffer {
@@ -53,6 +53,12 @@
val SCHEMA_EXPOSURE_SERVICE = "SCHEMA_EXPOSURE_SERVICE"
val SCHEMA_EXPOSURE_HOST = "SCHEMA_EXPOSURE_HOST"
val SCHEMA_EXPOSURE_DESERIALIZATION = "SCHEMA_EXPOSURE_SERIALIZATION"

//PARTITIONS, REPLICAS AND COMPRESSION
val AMOUNT_OF_PARTITIONS = "AMOUNT_OF_PARTITONS"
val AMOUNT_OF_REPLICAS = "AMOUNT_OF_REPLICAS"
val COMPRESSION_TYPE = "compression.type"

}

/** The implementation for the Kafka buffer. This buffer is the default.
@@ -85,6 +91,11 @@ class KafkaBuffer[T <: Serializable with AnyRef: ClassTag: TypeTag](
val SCHEMA_EXPOSURE = false
val SCHEMA_EXPOSURE_SERVICE = "redis"
val SCHEMA_EXPOSURE_HOST = "redis://localhost:6379"

//PARTITIONS, REPLICAS AND COMPRESSION
val AMOUNT_OF_PARTITIONS = 1
val AMOUNT_OF_REPLICAS = 1
val COMPRESSION_TYPE = "none"
}

/** Get a Kafka Consumer as source for a stage.
@@ -118,6 +129,12 @@ class KafkaBuffer[T <: Serializable with AnyRef: ClassTag: TypeTag](
exposeSchema()
}

// Make sure the topic already exists, otherwise create it.
checkAndCreateSubject(
topic,
properties
.getOrElse[String](KafkaBuffer.BROKER, KafkaBufferDefaults.BROKER))

// Create Kafka producer.
val producer =
new FlinkKafkaProducer[T](topic, getSerializer, getKafkaProperties)
@@ -138,6 +155,7 @@ class KafkaBuffer[T <: Serializable with AnyRef: ClassTag: TypeTag](
kafkaProp.put("auto.commit.interval.ms",
KafkaBufferDefaults.AUTO_COMMIT_INTERVAL_MS)
kafkaProp.put("enable.auto.commit", KafkaBufferDefaults.ENABLE_AUTO_COMMIT)
kafkaProp.put("compression.type", KafkaBufferDefaults.COMPRESSION_TYPE)
kafkaProp.put("group.id", groupId)

properties.getContents.foreach {
@@ -168,7 +186,16 @@ class KafkaBuffer[T <: Serializable with AnyRef: ClassTag: TypeTag](
if (!alreadyCreated) {
// The topic configuration will probably be overwritten by the producer.
logger.info(s"Topic $topic doesn't exist yet, now creating it.")
val newTopic = new NewTopic(topic, 1, 1)
val newTopic = new NewTopic(
topic,
properties.getOrElse[Int](
KafkaBuffer.AMOUNT_OF_PARTITIONS,
KafkaBufferDefaults.AMOUNT_OF_PARTITIONS)(_.toInt),
properties
.getOrElse[Int](KafkaBuffer.AMOUNT_OF_REPLICAS,
KafkaBufferDefaults.AMOUNT_OF_REPLICAS)(_.toInt)
.asInstanceOf[Short]
)
createTopic(adminClient, newTopic)
}
}
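The new `AMOUNT_OF_PARTITIONS`, `AMOUNT_OF_REPLICAS`, and `compression.type` options replace the previously hard-coded `new NewTopic(topic, 1, 1)` and uncompressed producer. Below is a minimal, self-contained sketch of the topic-creation call these settings feed into, using only the plain Kafka `AdminClient` API already imported above; the broker address, topic name, and chosen values are placeholders, and a real pipeline supplies them through the buffer `Properties` shown in the diff.

```scala
import java.util.Properties
import scala.collection.JavaConverters._
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig, NewTopic}

object CreateTopicSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder broker address; in the buffer this comes from KafkaBuffer.BROKER.
    val props = new Properties()
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")

    val admin = AdminClient.create(props)
    try {
      // Mirrors checkAndCreateSubject: partitions and replicas are now configurable
      // (AMOUNT_OF_PARTITIONS / AMOUNT_OF_REPLICAS) instead of being fixed at 1.
      val partitions = 3
      val replicas: Short = 1
      admin.createTopics(List(new NewTopic("example_topic", partitions, replicas)).asJava)
        .all()
        .get()
    } finally {
      admin.close()
    }
  }
}
```

The `compression.type` default of `none` is applied to the producer properties in `getKafkaProperties`, which the test changes below also assert.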
KafkaBufferTest.scala
@@ -151,6 +151,7 @@ class KafkaBufferTest
correctDefaultProperties.put("auto.commit.interval.ms", "100")
correctDefaultProperties.put("enable.auto.commit", "true")
correctDefaultProperties.put("group.id", "test")
correctDefaultProperties.put("compression.type", "none")
assert(kafkaBuffer.getKafkaProperties == correctDefaultProperties)

val properties = new org.codefeedr.Properties()
@@ -169,6 +170,7 @@
correctProperties.put("enable.auto.commit", "true")
correctProperties.put("group.id", "test")
correctProperties.put("some.other.property", "some-value")
correctProperties.put("compression.type", "none")
assert(kafkaBuffer2.getKafkaProperties == correctProperties)

}
ElasticSearchOutput.scala
@@ -39,11 +39,12 @@ import org.codefeedr.stages.OutputStage
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.client.{Requests, RestClientBuilder}
import org.elasticsearch.common.xcontent.XContentType
import org.json4s.NoTypeHints
import org.json4s.{FieldSerializer, NoTypeHints}
import org.json4s.ext.JavaTimeSerializers
import org.json4s.jackson.Serialization
import collection.JavaConversions._
import org.json4s.FieldSerializer._

import collection.JavaConversions._
import scala.reflect.{ClassTag, Manifest}

/**
@@ -92,7 +93,7 @@ class ElasticSearchOutput[T <: Serializable with AnyRef: ClassTag: Manifest](
if (servers.isEmpty) {
logger.info(
"Transport address set is empty. Using localhost with default port 9300.")
transportAddresses.add(new HttpHost("localhost", 9300, "http"))
transportAddresses.add(new HttpHost("localhost", 9200, "http"))
}

for (server <- servers) {
@@ -119,7 +120,13 @@ private class ElasticSearchSink[
T <: Serializable with AnyRef: ClassTag: Manifest](index: String)
extends ElasticsearchSinkFunction[T] {

implicit lazy val formats = Serialization.formats(NoTypeHints) ++ JavaTimeSerializers.all
// ES records are not allowed to have _id fields, so we replace it with idd.
val esSerializer = FieldSerializer[T](
renameTo("_id", "idd"),
renameFrom("idd", "_id")
)

implicit lazy val formats = Serialization.formats(NoTypeHints) ++ JavaTimeSerializers.all + esSerializer

def createIndexRequest(element: T): IndexRequest = {
val bytes = serialize(element)
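The added `FieldSerializer` renames a case-class `_id` field to `idd` when writing to Elasticsearch (and back when reading), since `_id` is reserved document metadata in ES. A small stand-alone json4s sketch of the same rename follows; the `Doc` case class is made up for illustration.

```scala
import org.json4s.{FieldSerializer, NoTypeHints}
import org.json4s.FieldSerializer.{renameFrom, renameTo}
import org.json4s.jackson.Serialization

// Hypothetical record type whose _id field would clash with Elasticsearch metadata.
case class Doc(_id: String, message: String)

object FieldRenameSketch {
  // Same construction as ElasticSearchSink: _id -> idd on write, idd -> _id on read.
  val docSerializer = FieldSerializer[Doc](
    renameTo("_id", "idd"),
    renameFrom("idd", "_id")
  )

  implicit val formats = Serialization.formats(NoTypeHints) + docSerializer

  def main(args: Array[String]): Unit = {
    println(Serialization.write(Doc("42", "hello")))                   // {"idd":"42","message":"hello"}
    println(Serialization.read[Doc]("""{"idd":"1","message":"hi"}""")) // Doc(1,hi)
  }
}
```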
ElasticSearchOutputTest.scala
@@ -46,7 +46,7 @@ class ElasticSearchOutputTest extends FunSuite with MockitoSugar {

assert(addresses.size() == 1)
assert(addresses.get(0).getHostName == "localhost")
assert(addresses.get(0).getPort == 9300)
assert(addresses.get(0).getPort == 9200)
}

test("Should add configured hosts") {
New file: GHTorrent routing keys
@@ -0,0 +1,22 @@
evt.commitcomment.insert
evt.create.insert
evt.delete.insert
evt.deployment.insert
evt.deploymentstatus.insert
evt.fork.insert
evt.gollum.insert
evt.issuecomment.insert
evt.issues.insert
evt.member.insert
evt.membership.insert
evt.pagebuild.insert
evt.public.insert
evt.pullrequest.insert
evt.pullrequestreviewcomment.insert
evt.push.insert
evt.release.insert
evt.repository.insert
evt.status.insert
evt.teamadd.insert
evt.watch.insert
ent.commits.insert
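These are the GHTorrent routing keys the new plugin listens to, one per GitHub event type plus `ent.commits.insert` for commit entities. As a rough sketch, binding such keys to a RabbitMQ queue with the standard Java client looks like the following; the host, the exchange name (`ght-streams`), and the file name are assumptions, not necessarily what the plugin itself uses.

```scala
import scala.io.Source
import com.rabbitmq.client.ConnectionFactory

object BindRoutingKeysSketch {
  def main(args: Array[String]): Unit = {
    val factory = new ConnectionFactory()
    factory.setHost("localhost") // placeholder RabbitMQ host

    val connection = factory.newConnection()
    val channel = connection.createChannel()

    // Assumed exchange and file name; the plugin's actual configuration may differ.
    val exchange = "ght-streams"
    val queue = channel.queueDeclare().getQueue

    // One binding per routing key in the list above.
    Source.fromFile("routing_keys.txt").getLines().filter(_.nonEmpty)
      .foreach(key => channel.queueBind(queue, exchange, key))

    channel.close()
    connection.close()
  }
}
```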
GHTorrent.scala
@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.codefeedr.plugins.ghtorrent.protocol

object GHTorrent {

/** Represents a record as retrieved from GHTorrent.
*
* @param routingKey the routing key (e.g. evt.push.insert)
* @param contents the content of the queue record.
*/
case class Record(routingKey: String, contents: String)

/** Every GHTorrent record has an _id object.
*
* @param `$oid` the object id.
*/
case class _id(`$oid`: String)

/** Every event should subtype from this class. */
abstract class Event() extends Serializable
}
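`Record` pairs a routing key with the raw JSON payload from the queue, and concrete events are expected to extend `Event`. A sketch of what a subtype and its json4s extraction could look like; `PushEvent` and its fields are illustrative rather than the plugin's actual model.

```scala
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse
import org.codefeedr.plugins.ghtorrent.protocol.GHTorrent._

// Hypothetical event subtype; the real plugin defines its own case class per routing key.
case class PushEvent(_id: _id, `type`: String) extends Event

object ParseRecordSketch {
  implicit val formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    val record = Record(
      "evt.push.insert",
      """{"_id": {"$oid": "5c9a0000abcdef0123456789"}, "type": "PushEvent"}"""
    )

    // Dispatch on the routing key, then extract the payload into the matching case class.
    record.routingKey match {
      case "evt.push.insert" => println(parse(record.contents).extract[PushEvent])
      case other             => println(s"No handler for routing key $other")
    }
  }
}
```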
