DataHub2OdpsDemo.scala
package com.aliyun.odps.spark.examples.streaming.datahub

import com.aliyun.datahub.model.RecordEntry
import com.aliyun.odps.spark.examples.streaming.common.SparkSessionSingleton
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.aliyun.datahub.DatahubUtils

object DataHub2OdpsDemo {

  def transferFunc(record: RecordEntry): String = {
    // This conversion function currently only turns a DataHub record into a single String field.
    // If multiple fields are needed, the concatenation logic has to be handled here
    // (see the illustrative sketch below).
    record.getString(1)
  }
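
  // Illustrative sketch only, not wired into the stream below: a multi-field variant could
  // concatenate several record fields into one string. The field indices and the comma
  // delimiter are assumptions for demonstration, not part of the original demo.
  def transferMultiFieldFunc(record: RecordEntry): String = {
    Seq(record.getString(0), record.getString(1)).mkString(",")
  }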

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("DataHubStreamingDemo")
      .config("spark.hadoop.fs.oss.credentials.provider", "org.apache.hadoop.fs.aliyun.oss.AliyunStsTokenCredentialsProvider")
      .config("spark.hadoop.fs.oss.ststoken.roleArn", "acs:ram::****:role/aliyunodpsdefaultrole")
      .config("spark.hadoop.fs.oss.endpoint", "oss-cn-hangzhou-zmf.aliyuncs.com")
      .getOrCreate()

    // Set the batch interval
    val ssc = new StreamingContext(spark.sparkContext, Seconds(10))

    // Checkpoint directory on OSS
    ssc.checkpoint("oss://bucket/inputdata/")

    // Replace the placeholder strings below with your own DataHub project, topic,
    // subscription ID, credentials and endpoint.
    val dataStream = DatahubUtils.createStream(
      ssc,
      "projectName",
      "topic",
      "subId",
      "accessId",
      "accessKey",
      "endPoint",
      transferFunc(_),
      StorageLevel.MEMORY_AND_DISK
    )

    // Convert each record (delivered as bytes) back to a String and append every batch
    // to the MaxCompute table "test_table".
    dataStream.map(x => new String(x)).foreachRDD { rdd =>
      val spark = SparkSessionSingleton.getInstance(rdd.sparkContext.getConf)
      import spark.implicits._
      rdd.toDF("id").write.mode("append").saveAsTable("test_table")
    }
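
    // Alternative sketch, not used here: if "test_table" already exists with a compatible
    // schema, each batch could instead be written with
    //   rdd.toDF("id").write.insertInto("test_table")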

    ssc.start()
    ssc.awaitTermination()
  }
}