New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Fixed runtime issue. #1
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1 @@ | ||
spark-submit --master local[6] target/scala-2.10/cassandra-connector-failure-demo-assembly-1.0.jar | ||
spark-submit --master local[6] target/scala-2.10/cassandra-connector-success-demo-assembly-1.1.jar | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. That's right, I went there... |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
# Licensed to the Apache Software Foundation (ASF) under one | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Just added for SBT runs, so logging is visible apart from the log hailstorm produced by spark-submit. |
||
# or more contributor license agreements. See the NOTICE file | ||
# distributed with this work for additional information | ||
# regarding copyright ownership. The ASF licenses this file | ||
# to you under the Apache License, Version 2.0 (the | ||
# "License"); you may not use this file except in compliance | ||
# with the License. You may obtain a copy of the License at | ||
# | ||
# http://www.apache.org/licenses/LICENSE-2.0 | ||
# | ||
# Unless required by applicable law or agreed to in writing, software | ||
# distributed under the License is distributed on an "AS IS" BASIS, | ||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
# See the License for the specific language governing permissions and | ||
# limitations under the License. | ||
|
||
# For production, set the pattern to %c instead of location-based conversions. | ||
# (%l — and likewise %F/%L used below — require computing caller location and are slow.) | ||
|
||
# output messages into a rolling log file as well as stdout | ||
log4j.rootLogger=WARN,stdout | ||
|
||
# stdout | ||
log4j.appender.stdout=org.apache.log4j.ConsoleAppender | ||
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout | ||
log4j.appender.stdout.layout.ConversionPattern=[%F:%L] : %m%n | ||
|
||
log4j.logger.fake=DEBUG | ||
log4j.logger.com.datastax.spark.connector=WARN | ||
log4j.logger.org.apache=WARN |
This file was deleted.
This file was deleted.
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
package fake | ||
|
||
import com.datastax.spark.connector._ | ||
import org.apache.spark.streaming._ | ||
import com.datastax.spark.connector.streaming._ | ||
import org.apache.spark.{SparkConf, SparkContext} | ||
|
||
object FakeDataStreamer {

  // Unused placeholder; retained unchanged for interface compatibility.
  // NOTE(review): nothing in this file invokes it — confirm external users before removing.
  var failureHandler = (a: String, b: String) => { 1 }

  /**
   * Entry point: builds the Spark streaming context and runs the fake-data pipeline.
   *
   * Pass "d" as the first argument to pause for 8 seconds so a remote
   * debugger can be attached before the stream starts.
   */
  def main(args: Array[String]) {
    val debugRequested = args.headOption.exists(_ == "d")
    if (debugRequested) {
      println("-------------Attach debugger now!--------------")
      Thread.sleep(8000)
    }

    val sparkConf = new SparkConf()
      .setAppName("Fake Data Stream")
      .set("spark.cassandra.connection.host", "127.0.0.1") // get from args

    // 3-second micro-batches.
    val streamContext = new StreamingContext(sparkConf, Seconds(3))

    generateReceiverStream(streamContext)

    streamContext.start()
    streamContext.awaitTermination()
  }

  /**
   * Wires a FakeDataReceiver into `ssc`, parses each non-empty line into a
   * FakeMessage, and writes the results to the `fake_data` keyspace
   * (both the `messages` and `latest_message` tables).
   */
  def generateReceiverStream(ssc: StreamingContext): Unit = {
    val rawLines = ssc.receiverStream[String](new FakeDataReceiver())
    val messages = rawLines.filter(_.nonEmpty).map(FakeMessage(_))

    // The cql table uses camelCase field names, so each column must be aliased
    // explicitly with `as`. Snake_case cql names (message_id, message_content)
    // would make these aliases unnecessary; a connector ticket exists to lift
    // the requirement for identical-name aliases.
    val camelCaseColumns = SomeColumns(
      "messageId" as "messageId",
      "messageContent" as "messageContent",
      "timestamp" as "timestamp")

    messages.saveToCassandra("fake_data", "messages", camelCaseColumns)
    messages.saveToCassandra("fake_data", "latest_message", camelCaseColumns)

    messages.print
  }
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
package fake | ||
|
||
import java.util.Date | ||
|
||
/** | ||
* Created by cwheeler on 5/18/15. | ||
*/ | ||
/**
 * A single fake message parsed from a raw receiver record.
 *
 * @param messageId      identifier parsed from the record (text before the first comma)
 * @param messageContent message body (text after the first comma; may itself contain commas)
 * @param timestamp      wall-clock time at which the record was parsed
 */
case class FakeMessage(messageId: String, messageContent: String, timestamp: Date) extends Serializable

object FakeMessage {

  /**
   * Parses a raw "id,content" record into a FakeMessage stamped with the current time.
   *
   * The record is split on the FIRST comma only, so message content containing
   * commas is preserved intact (the previous `split(",")` truncated it to the
   * first segment). A record with no comma yields an empty content field;
   * previously `arr(1)` threw ArrayIndexOutOfBoundsException in that case —
   * the upstream stream only filters out empty strings, not comma-less ones.
   */
  def apply(record: String): FakeMessage = {
    val parts = record.split(",", 2)
    val content = if (parts.length > 1) parts(1) else ""
    FakeMessage(parts(0), content, new Date())
  }
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This helped — it's the Scala version (2.10) we build Spark 1.2.1 against.