1. package com
    2. import org.apache.spark.sql.{DataFrame, SparkSession}
    3. case class User(name: String, age: Int)
    4. object DF2RDD {
    5. def main(args: Array[String]): Unit = {
    6. val spark: SparkSession = SparkSession
    7. .builder()
    8. .master("local[*]")
    9. .appName("DF2RDD")
    10. .getOrCreate()
    11. val df = spark.read.json("E:\\ZJJ_SparkSQL\\demo01\\src\\main\\resources\\users.json")
    12. df.printSchema()
    13. val rdd1 = df.rdd.map(row => {
    14. User(row.getString(1), row.getLong(0).toInt)
    15. })
    16. rdd1.collect.foreach(println)
    17. spark.close()
    18. }
    19. }

Output:

User(张三,18)
User(李四,15)