I am trying to convert an RDD[Row] to an RDD[Vector], but it throws an exception:

java.lang.ClassCastException: org.apache.spark.ml.linalg.DenseVector cannot be cast to org.apache.spark.mllib.linalg.Vector

My code is:
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{rand, randn}
import org.apache.spark.ml.feature.{StandardScaler, VectorAssembler}

val spark = SparkSession.builder().master("local").getOrCreate()
val df = spark.range(0, 10).withColumn("uniform", rand(10L)).withColumn("normal1", randn(10L)).withColumn("normal2", randn(11L))
val assembler = new VectorAssembler().setInputCols(Array("uniform", "normal1", "normal2")).setOutputCol("features")
val dfVec = assembler.transform(df)
val dfOutlier = dfVec.select("id", "features").union(spark.createDataFrame(Seq((10, org.apache.spark.mllib.linalg.Vectors.dense(3, 3, 3)))))
dfOutlier.show(false)
val scaler = new StandardScaler().setInputCol("features").setOutputCol("Scaled").setWithStd(true).setWithMean(true)
val model = scaler.fit(dfOutlier).transform(dfOutlier)
model.show(false)
// this cast is where the ClassCastException is thrown once an action runs
val dfVecRdd = model.select("Scaled").rdd.map(_(0).asInstanceOf[org.apache.spark.mllib.linalg.Vector])
When I perform an action on dfVecRdd, the exception above is raised. How can I solve this?
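In case it clarifies what I am trying to end up with: I want an RDD of the old mllib vectors. My guess is that the ml StandardScaler writes org.apache.spark.ml.linalg vectors into the Scaled column, so something like the sketch below (assuming org.apache.spark.mllib.linalg.Vectors.fromML is the right bridge between the two vector types; I have not verified this is the correct approach) is roughly what I mean:

import org.apache.spark.ml.linalg.{Vector => MLVector}
import org.apache.spark.mllib.linalg.{Vector => MLlibVector, Vectors => MLlibVectors}
import org.apache.spark.rdd.RDD

// Sketch only: read each cell as the new ml vector type that the ml
// StandardScaler presumably produces, then convert to an mllib vector.
val dfVecRdd: RDD[MLlibVector] = model.select("Scaled").rdd
  .map(row => MLlibVectors.fromML(row.getAs[MLVector](0)))

If simply casting to org.apache.spark.ml.linalg.Vector instead is the more idiomatic fix, that would answer my question too.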