This is my JSON data. I am sending it to a Kafka topic, reading it with a Spark RDD, and saving it into Cassandra.
[{
    "sensor": "swapSensor",
    "sendtime": "2016-09-15T11:05:01.000Z",
    "data": [{
        "@context": "Context"
    }]
}]
This is my Cassandra table:

CREATE TABLE IF NOT EXISTS event (sensor text, sendTime text, count bigint, entireJson text, PRIMARY KEY ((sensor)));
I want to push the entire raw JSON payload into the table's entireJson column.
Here is my code.
import kafka.serializer.StringDecoder
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
import com.datastax.spark.connector._

object StreamingData {
  var count = 1

  def main(args: Array[String]) {
    val Array(brokers, topics, cassandraHost) = Array("1.11.22.50:9092", "c", "localhost")

    def createSparkContext(): StreamingContext = {
      val conf = new SparkConf()
        .setAppName("c Events Processing")
        .setMaster("local[2]")
        .set("spark.cassandra.connection.host", cassandraHost)
        .set("spark.cassandra.connection.keep_alive_ms", "60000") // prevent the Cassandra connection from being closed after every write
      val sc = new SparkContext(conf)
      // Create the streaming context with an 8-second batch interval
      val ssc = new StreamingContext(sc, Seconds(8))
      val sqlContext = new SQLContext(sc)

      // Direct Kafka stream; each record value is one raw JSON message
      val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers)
      val topicsSet = topics.split(",").toSet
      val cEvents = KafkaUtils
        .createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topicsSet)
        .map(_._2)

      cEvents.foreachRDD { rdd =>
        if (!rdd.isEmpty()) {
          count = count + 1
          // Keep the raw message so it can be written verbatim to the entireJson column
          // (this assumes one JSON message per batch; otherwise each row needs its own raw string)
          val rawJson = rdd.first()
          val batch = count // capture into a local val so the closure below does not drag the outer var along
          sqlContext.read.json(rdd).registerTempTable("eventTable")
          val eventdf1 = sqlContext.sql("SELECT * FROM eventTable")
          eventdf1.collect.foreach(println) // debug: print every parsed event
          // Select only the columns being stored (the sample payload has no actor field under data)
          val eventdf = sqlContext.sql("SELECT sensor, sendtime FROM eventTable")
          eventdf.printSchema()
          eventdf.map { r =>
            (r.getString(0) + batch, r.getString(1), batch, rawJson)
          }.saveToCassandra("c", "event", SomeColumns("sensor", "sendtime", "count", "entirejson"))
          // NOTE: Cassandra lowercases unquoted identifiers, so the columns created as
          // sendTime and entireJson are actually named sendtime and entirejson
        }
      }
      ssc
    }

    val ssc = createSparkContext()
    ssc.start()
    ssc.awaitTermination()
  }
}
Create an entity class and map it to the table; with that you can store the RDD directly into the Cassandra database. Check this link: https://github.com/datastax/spark-cassandra-connector/blob/master/doc/6_advanced_mapper.md
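For illustration, a minimal sketch of that approach, assuming a hypothetical Event case class whose field names match the table's columns (Cassandra stores unquoted identifiers lowercased, hence sendtime and entirejson):

import com.datastax.spark.connector._
import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical entity class; the connector maps its fields to columns by name
case class Event(sensor: String, sendtime: String, count: Long, entirejson: String)

object EntitySaveSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("entity-save-sketch")
      .setMaster("local[2]")
      .set("spark.cassandra.connection.host", "localhost")
    val sc = new SparkContext(conf)

    // One event built from the sample payload, keeping the raw JSON verbatim
    val raw = """[{"sensor":"swapSensor","sendtime":"2016-09-15T11:05:01.000Z","data":[{"@context":"Context"}]}]"""
    val events = sc.parallelize(Seq(Event("swapSensor", "2016-09-15T11:05:01.000Z", 1L, raw)))

    events.saveToCassandra("c", "event")
    sc.stop()
  }
}

The same saveToCassandra call works on a streaming RDD inside foreachRDD once the records have been mapped to Event instances.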
I tried this, and it saves the raw data into the column of my Cassandra table:
var rawdata = ""
for (item <- rdd.collect().toArray) {
System.out.println(item);
rawdata = item
}
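To then get rawdata into the entireJson column, a hedged sketch of the remaining step. It is assumed to run inside the same foreachRDD block, with the question's sqlContext and com.datastax.spark.connector._ import in scope; entirejson is the lowercased name Cassandra actually gives the unquoted entireJson column:

// Pair the parsed columns with the captured raw payload and write the batch out
val raw = rawdata      // copy to a val so the closure captures a plain value
val batch = count.toLong
sqlContext.read.json(rdd)
  .select("sensor", "sendtime")
  .map(r => (r.getString(0), r.getString(1), batch, raw))
  .saveToCassandra("c", "event", SomeColumns("sensor", "sendtime", "count", "entirejson"))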