Is it possible to combine GraphX and DataFrames? I would like every vertex in the graph to have its own DataFrame. I know that GraphX and DataFrames are both built on RDDs, that nested RDDs are not possible, and that SparkContext is not serializable. However, in Spark 2.0.0 I saw that SparkSession is serializable, so I tried that, but it still doesn't work. I've also tried storing the DataFrames globally in an array, but I can't access that array from a worker node. Ignore the methods sendMsg and merge:
import java.util

import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.types.{BooleanType, StructField, StructType}

object Main {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("ScalaGraphX_SQL")
      .master("spark://home:7077")
      .enableHiveSupport()
      .getOrCreate()
    val sc = spark.sparkContext

    val node_pair: RDD[(Array[String], Long)] =
      sc.textFile(args(0)).map(l => l.split(" ")).zipWithIndex()

    // set array size
    Tables.tables = new Array[Dataset[Row]](node_pair.count().toInt)

    // insert an (empty) DataFrame for every node into the array
    node_pair.collect().foreach { case (arr, l) =>
      val fields = arr.takeRight(arr.length - 2)
        .map(fieldName => StructField(fieldName, BooleanType, nullable = true))
      val schema = StructType(fields)
      val rows = new util.ArrayList[Row]
      Tables.tables(l.toInt) = spark.createDataFrame(rows, schema)
    }

    // create vertices
    val vertices: RDD[(VertexId, TreeNode)] = node_pair.map { case (arr, l) =>
      (l, new TreeNode(l, false))
    }

    // create edges
    val edges: RDD[Edge[Boolean]] = node_pair
      .filter { case (arr, l) => arr(0).toLong != -1 }
      .map { case (arr, l) => Edge(l, arr(0).toLong, true) }

    val init_node: TreeNode = new TreeNode(-1, false)
    val graph = Graph(vertices, edges, init_node)
    val graph_pregel = Pregel(graph, init_node, Int.MaxValue, EdgeDirection.Out)(vProg, sendMsg, merge)
    graph_pregel.vertices.collect().foreach(v => println(v._2.index))
  }

  def vProg(id: VertexId, act: TreeNode, other: TreeNode): TreeNode = {
    // runs on a worker node, where Tables.tables was never filled (it is only set on the driver)
    println(Tables.tables(act.index.toInt))
    act
  }

  def sendMsg(et: EdgeTriplet[TreeNode, Boolean]): Iterator[(VertexId, TreeNode)] = {
    if (et.srcAttr.v) {
      println(et.srcId + "--->" + et.dstId)
      Iterator((et.dstId, et.srcAttr))
    } else {
      //println(et.srcId + "-/->" + et.dstId)
      Iterator.empty
    }
  }

  def merge(n1: TreeNode, n2: TreeNode): TreeNode = {
    n1
  }
}
object Tables extends Serializable {
  var tables: scala.Array[Dataset[Row]] = null
}
class TreeNode(val index: Long, var v: Boolean) extends Serializable
Is there perhaps a way to access the global array from within the RDD operations? Or does someone have another solution for this problem?
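For what it's worth, here is a rough sketch of the direction I have been considering, in case it clarifies what I'm after: instead of keeping an Array[Dataset[Row]] on the driver, collect each node's data into plain Scala collections and ship them to the workers as a broadcast variable, since broadcast values (unlike my global array) are readable on worker nodes. The names NodeTable, tablesLocal and tablesBc are placeholders of my own, not a verified solution:

// Sketch only: replace the Array[Dataset[Row]] with plain Scala data
// that can be broadcast to the workers.
case class NodeTable(fieldNames: Array[String], rows: Seq[Seq[Boolean]])

// built on the driver, at the point where Tables.tables was filled before
val tablesLocal: Map[Long, NodeTable] = node_pair.collect().map { case (arr, l) =>
  l -> NodeTable(arr.takeRight(arr.length - 2), Seq.empty) // rows filled in later
}.toMap

// broadcast values are readable on worker nodes
val tablesBc = sc.broadcast(tablesLocal)

// vProg would then have to close over the broadcast, e.g. as a function value:
val vProgBc = (id: VertexId, act: TreeNode, other: TreeNode) => {
  println(tablesBc.value(act.index)) // works on a worker, unlike Tables.tables
  act
}

The obvious downside is that I lose the DataFrame API and everything has to fit in memory on every executor, which is why I am asking whether there is a better way.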