I have the following Spark code on Scala:
<code> def main(args: Array[String]): Unit = {
val spark = SparkSession.builder.master("local").getOrCreate()
import spark.implicits._
val scheme = new StructType()
.add(new StructField("state_id", IntegerType, true))
.add(new StructField("state", StringType, true))
.add(new StructField("recommendation", StringType, true))
val statesDf: DataFrame = spark.read
.format("jdbc")
.schema(scheme)
.option("driver", "org.postgresql.Driver")
.option("url", "jdbc:postgresql://localhost:5432/postgres")
.option("dbtable", "public.states")
.option("user", "postgres")
.option("password", "postgres")
.load()
statesDf.map(r => Row(r.getInt(0) * 2)).show()
}
</code>
<code> def main(args: Array[String]): Unit = {
val spark = SparkSession.builder.master("local").getOrCreate()
import spark.implicits._
val scheme = new StructType()
.add(new StructField("state_id", IntegerType, true))
.add(new StructField("state", StringType, true))
.add(new StructField("recommendation", StringType, true))
val statesDf: DataFrame = spark.read
.format("jdbc")
.schema(scheme)
.option("driver", "org.postgresql.Driver")
.option("url", "jdbc:postgresql://localhost:5432/postgres")
.option("dbtable", "public.states")
.option("user", "postgres")
.option("password", "postgres")
.load()
statesDf.map(r => Row(r.getInt(0) * 2)).show()
}
</code>
def main(args: Array[String]): Unit = {
val spark = SparkSession.builder.master("local").getOrCreate()
import spark.implicits._
val scheme = new StructType()
.add(new StructField("state_id", IntegerType, true))
.add(new StructField("state", StringType, true))
.add(new StructField("recommendation", StringType, true))
val statesDf: DataFrame = spark.read
.format("jdbc")
.schema(scheme)
.option("driver", "org.postgresql.Driver")
.option("url", "jdbc:postgresql://localhost:5432/postgres")
.option("dbtable", "public.states")
.option("user", "postgres")
.option("password", "postgres")
.load()
statesDf.map(r => Row(r.getInt(0) * 2)).show()
}
And I receive the error near “map”-function:
<code> No implicits found for parameter evidence$6: Encoder[Row]
</code>
<code> No implicits found for parameter evidence$6: Encoder[Row]
</code>
No implicits found for parameter evidence$6: Encoder[Row]
I expected “import spark.implicits._” was enough for prevent this problem, but I was wrong