How to unpivot the table given below into (id, dept) rows:
Solution:
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

// In spark-shell the `spark` session and the $-column syntax already exist;
// in a standalone application create the session and import the implicits.
val spark = SparkSession.builder().getOrCreate()
import spark.implicits._

// The file has no header, so the columns come in as _c0, _c1, _c2, _c3.
val df = spark.read.format("csv").load("file:///C:/db/data2.txt")
df.show()

// Explode the three department columns into one row per (id, dept) pair,
// drop the rows produced by empty cells, and rename the id column.
val df2 = df
  .select($"_c0", explode(array("_c1", "_c2", "_c3")).alias("dept"))
  .na.drop()
  .withColumnRenamed("_c0", "id")
df2.show()
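An equivalent way to produce the same (id, dept) rows is Spark SQL's stack generator. The minimal sketch below assumes the same headerless four-column file as above; the name df3 is chosen only for illustration.

// Alternative sketch: unpivot with the SQL stack() generator instead of
// explode(array(...)); rows coming from empty cells are filtered explicitly.
val df3 = df.selectExpr("_c0 as id", "stack(3, _c1, _c2, _c3) as dept")
  .where("dept is not null")
df3.show()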