Post by skm*_*000

Problem with toDF: value toDF is not a member of org.apache.spark.rdd.RDD

I have attached the code snippet that produces the error "value toDF is not a member of org.apache.spark.rdd.RDD". I am using Scala 2.11.8 and Spark 2.0.0. Can you help me resolve this problem with the toDF() API?

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.functions._

object HHService {
    case class Services(
        uhid: String,
        locationid: String,
        doctorid: String,
        billdate: String,
        servicename: String,
        servicequantity: String,
        starttime: String,
        endtime: String,
        servicetype: String,
        servicecategory: String,
        deptname: String
    )

    def toService = (p: Seq[String]) => Services(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10))

    def main(args: Array[String]){
        val warehouseLocation = "file:${system:user.dir}/spark-warehouse"
        val spark = SparkSession
            .builder
            .appName(getClass.getSimpleName)
            .config("spark.sql.warehouse.dir", warehouseLocation)
            .enableHiveSupport()
            .getOrCreate()
        val sc = spark.sparkContext 

        val sqlContext = spark.sqlContext

        import spark.implicits._
        import sqlContext.implicits._

        val …
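For readers hitting the same error: the two causes I would check first are an implicits import that is missing (or ambiguous, as when both spark.implicits._ and sqlContext.implicits._ are in scope, giving the compiler two candidate RDD conversions) and a case class defined inside the method that calls toDF(). Below is a minimal sketch that compiles against Spark 2.0.0; the Person class, object name, and local master are illustrative assumptions, not part of the original question.

import org.apache.spark.sql.SparkSession

// Hypothetical standalone example (not the original HHService code).
// The case class sits at the top level so the compiler can derive an
// implicit Encoder for it when toDF() is called.
case class Person(name: String, age: Int)

object ToDFCheck {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession
            .builder
            .appName("ToDFCheck")
            .master("local[*]") // assumption: local run, just for the sketch
            .getOrCreate()

        // Import implicits from the SparkSession only; a second
        // sqlContext.implicits._ import alongside this one can make the
        // RDD-to-Dataset conversion ambiguous, so toDF stops resolving.
        import spark.implicits._

        val rdd = spark.sparkContext.parallelize(Seq(Person("a", 1), Person("b", 2)))
        val df = rdd.toDF() // resolves via the implicit rddToDatasetHolder
        df.show()

        spark.stop()
    }
}

If toDF() compiles in this sketch but not in the original code, the overlap between the two implicits imports is the likely difference.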

dataframe apache-spark-sql

Score: 3 · Answers: 1 · Views: 5298
