Object SparkUtils
Utility class for accessing Spark internal functions.
Linear Supertypes
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def appendColumn(schema: StructType, colName: String, dataType: DataType, nullable: Boolean = false): StructType
- def appendVectorUDTColumn(schema: StructType, colName: String, dataType: DataType = new VectorUDT, nullable: Boolean = false): StructType
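Example (a minimal sketch): appending a plain column and then a vector column to a schema. The import path of SparkUtils is library-specific and assumed here.

```scala
import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType}
// The package of SparkUtils is library-specific; adjust this import to your project.
// import your.library.SparkUtils

val base = StructType(Seq(StructField("id", StringType)))

// Append a nullable double column, then a vector column (dataType defaults to new VectorUDT).
val withLabel    = SparkUtils.appendColumn(base, "label", DoubleType, nullable = true)
val withFeatures = SparkUtils.appendVectorUDTColumn(withLabel, "features")
```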
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def checkNumericType(schema: StructType, colName: String, msg: String = ""): Unit
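Example (a sketch): validating that a column is numeric before training. By analogy with Spark ML's SchemaUtils.checkNumericType, a non-numeric column presumably raises an error with msg appended.

```scala
import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType}

val schema = StructType(Seq(
  StructField("label", DoubleType),
  StructField("name", StringType)
))

// No-op when the column is numeric.
SparkUtils.checkNumericType(schema, "label")

// Presumably raises an error for a non-numeric column, with msg appended:
// SparkUtils.checkNumericType(schema, "name", "the label column must be numeric")
```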
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getNumClasses(dataset: Dataset[_], labelCol: String, maxNumClasses: Int = 100): Int
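Example (a sketch): using the inferred class count to choose between binary and multiclass handling. It is assumed here that the method inspects the labelCol of the Dataset and that maxNumClasses bounds the inference.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("num-classes-sketch").getOrCreate()
import spark.implicits._

val train = Seq((0.0, 1.2), (1.0, 0.3), (2.0, 4.5)).toDF("label", "feature")

// Infer the number of classes from the "label" column; maxNumClasses (default 100) presumably bounds the inference.
val numClasses = SparkUtils.getNumClasses(train, "label")
val objective  = if (numClasses > 2) "multiclass" else "binary"
```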
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def isVectorType(dataType: DataType): Boolean
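Example (a sketch): a small guard built on isVectorType. The helper requireVectorColumn is hypothetical and only for illustration.

```scala
import org.apache.spark.sql.DataFrame

// Hypothetical helper (not part of this API): fail fast unless the column is vector-typed.
def requireVectorColumn(df: DataFrame, col: String): Unit = {
  val dt = df.schema(col).dataType
  require(SparkUtils.isVectorType(dt), s"Column '$col' must be a vector column, but was $dt")
}
```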
- def loadMetadata(path: String, sc: SparkContext, expectedClassName: String = ""): Metadata
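Example (a sketch): reading metadata back from a path written by saveMetadata (see below). The expectedClassName argument is optional; the class name used here is illustrative.

```scala
import org.apache.spark.SparkContext

// sc is an active SparkContext (e.g. spark.sparkContext); the expected class name is illustrative.
def readMetadata(sc: SparkContext, modelPath: String) =
  SparkUtils.loadMetadata(modelPath, sc, expectedClassName = "com.example.MyModel")
```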
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def saveMetadata(instance: Params, path: String, sc: SparkContext, extraMetadata: Option[JObject] = None, paramMap: Option[JValue] = None): Unit
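Example (a sketch): persisting the metadata of a Params instance with an optional extra JSON object. The json4s DSL is implied by the JObject and JValue parameters; the keys below are illustrative.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.ml.param.Params
import org.json4s.JObject
import org.json4s.JsonDSL._

// instance is any ML component exposing Params (e.g. an Estimator or a Model).
def writeMetadata(instance: Params, path: String, sc: SparkContext): Unit = {
  val extra: JObject = ("formatVersion" -> "1.0") ~ ("savedBy" -> "example")
  SparkUtils.saveMetadata(instance, path, sc, extraMetadata = Some(extra))
}
```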
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()