Using a Java UDF in PySpark raises a type exception
Step 1: Define a Scala UDF that implements Spark's Java `UDF1` interface. The source is truncated after the `if` (`[…]`), so the even/odd median logic below is a reconstruction based on the class name; the original's parity check (`mid / 2`) was also a bug and is replaced with `n % 2`:

```scala
import org.apache.spark.sql.api.java.UDF1
import scala.collection.mutable

// Computes the median of an array<double> column.
class GetMidVal extends UDF1[mutable.WrappedArray[Double], Double] {
  override def call(arr: mutable.WrappedArray[Double]): Double = {
    val n = arr.length
    val arr_sorted = arr.sorted
    val mid: Int = (n - 1) / 2
    val mid_mod: Int = n % 2 // parity of n (the excerpt's `mid / 2` was a bug)
    if (mid_mod == 1) {
      // Odd length: the middle element is the median.
      arr_sorted(mid)
    } else {
      // Even length: average the two middle elements.
      // (The source is cut off here; this branch is a reconstruction.)
      (arr_sorted(mid) + arr_sorted(mid + 1)) / 2.0
    }
  }
}
```
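The rest of the post is cut off, but a typical way to call such a UDF from PySpark is to package the compiled class into a JAR, attach it to the session, and register it with `spark.udf.registerJavaFunction`. The sketch below assumes a JAR named `get_mid_val.jar` and that `GetMidVal` lives in the default package (both hypothetical here); note that the explicit `DoubleType()` return type must match the UDF's declared return type, and a mismatch between declared and actual types is one common way to hit the kind of type exception the title describes:

```python
from pyspark.sql import SparkSession
from pyspark.sql.types import DoubleType

# Hypothetical JAR path and class name; adjust to your build output
# and package structure.
spark = (
    SparkSession.builder
    .appName("java-udf-demo")
    .config("spark.jars", "get_mid_val.jar")
    .getOrCreate()
)

# Register the Scala/Java UDF under a SQL-callable name. The returnType
# argument must agree with the UDF1's declared return type (Double here).
spark.udf.registerJavaFunction("get_mid_val", "GetMidVal", DoubleType())

# Call it through SQL on an array<double> column.
df = spark.createDataFrame([([1.0, 3.0, 2.0],)], ["vals"])
df.selectExpr("get_mid_val(vals) AS median").show()
```

One more point worth checking when this raises a cast error: the array elements must actually be doubles on the JVM side. An `array<int>` column, for example, arrives as boxed `Integer`s and fails the cast to `Double` inside the UDF, so casting the column to `array<double>` first is a reasonable thing to try.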