class
SparkDFUtilsBridge extends AnyRef
Instance Constructors
-
new
SparkDFUtilsBridge()
Value Members
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: Any): Boolean
-
final
def
asInstanceOf[T0]: T0
-
def
broadcastJoinSkewed(notSkewed: DataFrame, skewed: DataFrame, joinCol: String, numRowsToBroadcast: Int): DataFrame
-
def
changeSchema(df: DataFrame, newScheme: List[String]): DataFrame
-
def
clone(): AnyRef
-
def
dedupTopN(df: DataFrame, n: Int, groupCol: Column, orderCols: List[Column]): DataFrame
-
def
dedupWithCombiner(df: DataFrame, groupCol: Column, orderByCol: Column, desc: Boolean, columnsFilter: List[String], columnsFilterKeep: Boolean): DataFrame
-
def
dedupWithOrder(df: DataFrame, groupCol: Column, orderCols: List[Column]): DataFrame
-
final
def
eq(arg0: AnyRef): Boolean
-
def
equals(arg0: Any): Boolean
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
def
hashCode(): Int
-
final
def
isInstanceOf[T0]: Boolean
-
def
joinSkewed(dfLeft: DataFrame, dfRight: DataFrame, joinExprs: Column, numShards: Int, joinType: String): DataFrame
-
def
joinWithRange(dfSingle: DataFrame, colSingle: String, dfRange: DataFrame, colRangeStart: String, colRangeEnd: String, DECREASE_FACTOR: Long): DataFrame
-
def
joinWithRangeAndDedup(dfSingle: DataFrame, colSingle: String, dfRange: DataFrame, colRangeStart: String, colRangeEnd: String, DECREASE_FACTOR: Long, dedupSmallRange: Boolean): DataFrame
-
final
def
ne(arg0: AnyRef): Boolean
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
Inherited from AnyRef
Inherited from Any
A concrete class, defined so that this functionality could be exposed in PySpark.