class ContextRDD[K, V, M] extends RDD[(K, V)] with Metadata[M]
Linear Supertypes
Metadata[M], RDD[(K, V)], Logging, Serializable, Serializable, AnyRef, Any
Instance Constructors
- new ContextRDD(rdd: RDD[(K, V)], metadata: M)
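A minimal construction sketch, assuming ContextRDD is imported from its defining package and is publicly constructible as the signature above suggests. The SampleMetadata case class and all values are hypothetical; only the constructor shape new ContextRDD(rdd, metadata) comes from this page.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical metadata payload; M can be any type the caller chooses.
case class SampleMetadata(sampleIds: Seq[String])

val spark = SparkSession.builder().master("local[*]").appName("ContextRDDExample").getOrCreate()
val sc = spark.sparkContext

// Any RDD of pairs can be wrapped; here keys are sample names and values are counts.
val pairs = sc.parallelize(Seq(("sampleA", 10L), ("sampleB", 7L)))

// Constructor shape from this page: new ContextRDD(rdd: RDD[(K, V)], metadata: M)
val crdd = new ContextRDD(pairs, SampleMetadata(Seq("sampleA", "sampleB")))

// The wrapped RDD and its metadata are exposed via the `rdd` and `metadata` vals listed below.
println(crdd.metadata.sampleIds)
```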
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- def ++(other: RDD[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def aggregate[U](zeroValue: U)(seqOp: (U, (K, V)) ⇒ U, combOp: (U, U) ⇒ U)(implicit arg0: ClassTag[U]): U
  - Definition Classes: RDD
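aggregate folds the tuple elements into a result of a different type U, using seqOp within each partition and combOp to merge partition results. A sketch of the inherited RDD behaviour, assuming an active SparkContext sc:

```scala
// Compute (sum of values, record count) in one pass over the pairs.
val data = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)))

val (sum, count) = data.aggregate((0, 0))(
  (acc, kv) => (acc._1 + kv._2, acc._2 + 1),   // seqOp: fold one (key, value) pair into the accumulator
  (l, r)    => (l._1 + r._1, l._2 + r._2)      // combOp: merge per-partition accumulators
)
// sum == 6, count == 3
```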
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def barrier(): RDDBarrier[(K, V)]
  - Definition Classes: RDD
  - Annotations: @Experimental() @Since( "2.4.0" )
- def cache(): ContextRDD.this.type
  - Definition Classes: RDD
- def cartesian[U](other: RDD[U])(implicit arg0: ClassTag[U]): RDD[((K, V), U)]
  - Definition Classes: RDD
- def checkpoint(): Unit
  - Definition Classes: RDD
- def cleanShuffleDependencies(blocking: Boolean): Unit
  - Definition Classes: RDD
  - Annotations: @Experimental() @DeveloperApi() @Since( "3.1.0" )
- def clearDependencies(): Unit
  - Attributes: protected
  - Definition Classes: RDD
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def coalesce(numPartitions: Int, shuffle: Boolean, partitionCoalescer: Option[PartitionCoalescer])(implicit ord: Ordering[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- def collect[U](f: PartialFunction[(K, V), U])(implicit arg0: ClassTag[U]): RDD[U]
  - Definition Classes: RDD
- def collect(): Array[(K, V)]
  - Definition Classes: RDD
- def compute(split: Partition, context: TaskContext): Iterator[(K, V)]
  - Definition Classes: ContextRDD → RDD
- def context: SparkContext
  - Definition Classes: RDD
- def count(): Long
  - Definition Classes: RDD
- def countApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble]
  - Definition Classes: RDD
- def countApproxDistinct(relativeSD: Double): Long
  - Definition Classes: RDD
- def countApproxDistinct(p: Int, sp: Int): Long
  - Definition Classes: RDD
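The countApproxDistinct overloads estimate the number of distinct (K, V) pairs with HyperLogLog-style sketches instead of an exact shuffle-based count; relativeSD trades accuracy for memory. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize((1 to 100000).map(i => (i % 1000, i % 10)))

val exact  = pairs.distinct().count()        // exact count, needs a shuffle
val approx = pairs.countApproxDistinct(0.01) // ~1% relative standard deviation, no shuffle
println(s"exact=$exact approx=$approx")
```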
- def countByValue()(implicit ord: Ordering[(K, V)]): Map[(K, V), Long]
  - Definition Classes: RDD
- def countByValueApprox(timeout: Long, confidence: Double)(implicit ord: Ordering[(K, V)]): PartialResult[Map[(K, V), BoundedDouble]]
  - Definition Classes: RDD
- final def dependencies: Seq[Dependency[_]]
  - Definition Classes: RDD
- def distinct(): RDD[(K, V)]
  - Definition Classes: RDD
- def distinct(numPartitions: Int)(implicit ord: Ordering[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def filter(f: ((K, V)) ⇒ Boolean): RDD[(K, V)]
  - Definition Classes: RDD
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- def first(): (K, V)
  - Definition Classes: RDD
- def firstParent[U](implicit arg0: ClassTag[U]): RDD[U]
  - Attributes: protected[org.apache.spark]
  - Definition Classes: RDD
- def flatMap[U](f: ((K, V)) ⇒ TraversableOnce[U])(implicit arg0: ClassTag[U]): RDD[U]
  - Definition Classes: RDD
- def fold(zeroValue: (K, V))(op: ((K, V), (K, V)) ⇒ (K, V)): (K, V)
  - Definition Classes: RDD
- def foreach(f: ((K, V)) ⇒ Unit): Unit
  - Definition Classes: RDD
- def foreachPartition(f: (Iterator[(K, V)]) ⇒ Unit): Unit
  - Definition Classes: RDD
- def getCheckpointFile: Option[String]
  - Definition Classes: RDD
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getDependencies: Seq[Dependency[_]]
  - Attributes: protected
  - Definition Classes: RDD
- final def getNumPartitions: Int
  - Definition Classes: RDD
  - Annotations: @Since( "1.6.0" )
- def getOutputDeterministicLevel: org.apache.spark.rdd.DeterministicLevel.Value
  - Attributes: protected
  - Definition Classes: RDD
  - Annotations: @DeveloperApi()
- def getPartitions: Array[Partition]
  - Definition Classes: ContextRDD → RDD
- def getPreferredLocations(split: Partition): Seq[String]
  - Attributes: protected
  - Definition Classes: RDD
- def getResourceProfile(): ResourceProfile
  - Definition Classes: RDD
  - Annotations: @Experimental() @Since( "3.1.0" )
- def getStorageLevel: StorageLevel
  - Definition Classes: RDD
- def glom(): RDD[Array[(K, V)]]
  - Definition Classes: RDD
- def groupBy[K](f: ((K, V)) ⇒ K, p: Partitioner)(implicit kt: ClassTag[K], ord: Ordering[K]): RDD[(K, Iterable[(K, V)])]
  - Definition Classes: RDD
- def groupBy[K](f: ((K, V)) ⇒ K, numPartitions: Int)(implicit kt: ClassTag[K]): RDD[(K, Iterable[(K, V)])]
  - Definition Classes: RDD
- def groupBy[K](f: ((K, V)) ⇒ K)(implicit kt: ClassTag[K]): RDD[(K, Iterable[(K, V)])]
  - Definition Classes: RDD
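These groupBy overloads group whole (K, V) tuples by a derived key (the method's own type parameter is also named K in the rendered signatures). A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3), ("c", 4)))

// Group whole tuples by their first component; each group is an Iterable of tuples.
val grouped = pairs.groupBy(_._1)
grouped.collect().foreach { case (k, kvs) => println(s"$k -> ${kvs.mkString(", ")}") }
```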
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- val id: Int
  - Definition Classes: RDD
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
  - Attributes: protected
  - Definition Classes: Logging
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def intersection(other: RDD[(K, V)], numPartitions: Int): RDD[(K, V)]
  - Definition Classes: RDD
- def intersection(other: RDD[(K, V)], partitioner: Partitioner)(implicit ord: Ordering[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- def intersection(other: RDD[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- lazy val isBarrier_: Boolean
  - Attributes: protected
  - Definition Classes: RDD
  - Annotations: @transient()
- def isCheckpointed: Boolean
  - Definition Classes: RDD
- def isEmpty(): Boolean
  - Definition Classes: RDD
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def isTraceEnabled(): Boolean
  - Attributes: protected
  - Definition Classes: Logging
- final def iterator(split: Partition, context: TaskContext): Iterator[(K, V)]
  - Definition Classes: RDD
- def keyBy[K](f: ((K, V)) ⇒ K): RDD[(K, (K, V))]
  - Definition Classes: RDD
- def localCheckpoint(): ContextRDD.this.type
  - Definition Classes: RDD
- def log: Logger
  - Attributes: protected
  - Definition Classes: Logging
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logDebug(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logError(msg: ⇒ String, throwable: Throwable): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logError(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logInfo(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logName: String
  - Attributes: protected
  - Definition Classes: Logging
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logTrace(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logWarning(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def map[U](f: ((K, V)) ⇒ U)(implicit arg0: ClassTag[U]): RDD[U]
  - Definition Classes: RDD
- def mapPartitions[U](f: (Iterator[(K, V)]) ⇒ Iterator[U], preservesPartitioning: Boolean)(implicit arg0: ClassTag[U]): RDD[U]
  - Definition Classes: RDD
- def mapPartitionsWithIndex[U](f: (Int, Iterator[(K, V)]) ⇒ Iterator[U], preservesPartitioning: Boolean)(implicit arg0: ClassTag[U]): RDD[U]
  - Definition Classes: RDD
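mapPartitions and mapPartitionsWithIndex apply a function once per partition rather than once per element, which is useful for amortising per-partition setup. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)), numSlices = 2)

// Tag every record with the index of the partition it came from.
val tagged = pairs.mapPartitionsWithIndex { (idx, iter) =>
  iter.map { case (k, v) => (k, (idx, v)) }
}
tagged.collect().foreach(println)
```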
- def max()(implicit ord: Ordering[(K, V)]): (K, V)
  - Definition Classes: RDD
- val metadata: M
  - Definition Classes: ContextRDD → Metadata
- def min()(implicit ord: Ordering[(K, V)]): (K, V)
  - Definition Classes: RDD
- var name: String
  - Definition Classes: RDD
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def parent[U](j: Int)(implicit arg0: ClassTag[U]): RDD[U]
  - Attributes: protected[org.apache.spark]
  - Definition Classes: RDD
- val partitioner: Option[Partitioner]
  - Definition Classes: ContextRDD → RDD
- final def partitions: Array[Partition]
  - Definition Classes: RDD
- def persist(): ContextRDD.this.type
  - Definition Classes: RDD
- def persist(newLevel: StorageLevel): ContextRDD.this.type
  - Definition Classes: RDD
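cache, persist and unpersist control reuse of the computed partitions; note they return ContextRDD.this.type, so the wrapper type is preserved when chaining. A sketch assuming crdd is the ContextRDD built in the constructor sketch above:

```scala
import org.apache.spark.storage.StorageLevel

// Keep the partitions in memory, spilling to disk if they do not fit.
crdd.persist(StorageLevel.MEMORY_AND_DISK)

val total = crdd.map(_._2).reduce(_ + _)   // first action materialises the cache
val n     = crdd.count()                   // second action reuses the cached partitions

crdd.unpersist(blocking = false)           // release the storage when done
```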
- def pipe(command: Seq[String], env: Map[String, String], printPipeContext: ((String) ⇒ Unit) ⇒ Unit, printRDDElement: ((K, V), (String) ⇒ Unit) ⇒ Unit, separateWorkingDir: Boolean, bufferSize: Int, encoding: String): RDD[String]
  - Definition Classes: RDD
- def pipe(command: String, env: Map[String, String]): RDD[String]
  - Definition Classes: RDD
- def pipe(command: String): RDD[String]
  - Definition Classes: RDD
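pipe streams each element's string form through an external command (one process per partition) and returns the command's stdout lines. A sketch assuming an active SparkContext sc and that cat is available on the worker PATH:

```scala
val pairs = sc.parallelize(Seq(("chr1", 100), ("chr2", 200)), numSlices = 2)

// Each tuple is written to the command's stdin as its toString, one element per line;
// `cat -n` simply numbers the lines it receives.
val piped = pairs.pipe("cat -n")
piped.collect().foreach(println)
```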
- final def preferredLocations(split: Partition): Seq[String]
  - Definition Classes: RDD
- def randomSplit(weights: Array[Double], seed: Long): Array[RDD[(K, V)]]
  - Definition Classes: RDD
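randomSplit divides the data into disjoint random samples sized according to the given weights, e.g. for a train/test split. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize((1 to 1000).map(i => (s"key$i", i)))

// 80/20 split with a fixed seed for reproducibility; weights are normalised if needed.
val Array(train, test) = pairs.randomSplit(Array(0.8, 0.2), seed = 42L)
println(s"train=${train.count()} test=${test.count()}")
```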
- val rdd: RDD[(K, V)]
- def reduce(f: ((K, V), (K, V)) ⇒ (K, V)): (K, V)
  - Definition Classes: RDD
- def repartition(numPartitions: Int)(implicit ord: Ordering[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- def sample(withReplacement: Boolean, fraction: Double, seed: Long): RDD[(K, V)]
  - Definition Classes: RDD
- def saveAsObjectFile(path: String): Unit
  - Definition Classes: RDD
- def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]): Unit
  - Definition Classes: RDD
- def saveAsTextFile(path: String): Unit
  - Definition Classes: RDD
- def setName(_name: String): ContextRDD.this.type
  - Definition Classes: RDD
- def sortBy[K](f: ((K, V)) ⇒ K, ascending: Boolean, numPartitions: Int)(implicit ord: Ordering[K], ctag: ClassTag[K]): RDD[(K, V)]
  - Definition Classes: RDD
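sortBy orders the whole tuples by a derived key. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize(Seq(("a", 3), ("b", 1), ("c", 2)))

// Sort the tuples by their value, descending, into a single output partition.
val sorted = pairs.sortBy(_._2, ascending = false, numPartitions = 1)
sorted.collect().foreach(println)   // (a,3), (c,2), (b,1)
```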
- def sparkContext: SparkContext
  - Definition Classes: RDD
- def subtract(other: RDD[(K, V)], p: Partitioner)(implicit ord: Ordering[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- def subtract(other: RDD[(K, V)], numPartitions: Int): RDD[(K, V)]
  - Definition Classes: RDD
- def subtract(other: RDD[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def take(num: Int): Array[(K, V)]
  - Definition Classes: RDD
- def takeOrdered(num: Int)(implicit ord: Ordering[(K, V)]): Array[(K, V)]
  - Definition Classes: RDD
- def takeSample(withReplacement: Boolean, num: Int, seed: Long): Array[(K, V)]
  - Definition Classes: RDD
- def toDebugString: String
  - Definition Classes: RDD
- def toJavaRDD(): JavaRDD[(K, V)]
  - Definition Classes: RDD
- def toLocalIterator: Iterator[(K, V)]
  - Definition Classes: RDD
- def toString(): String
  - Definition Classes: RDD → AnyRef → Any
- def top(num: Int)(implicit ord: Ordering[(K, V)]): Array[(K, V)]
  - Definition Classes: RDD
- def treeAggregate[U](zeroValue: U)(seqOp: (U, (K, V)) ⇒ U, combOp: (U, U) ⇒ U, depth: Int)(implicit arg0: ClassTag[U]): U
  - Definition Classes: RDD
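treeAggregate behaves like aggregate but merges partial results in a multi-level tree controlled by depth, rather than sending every partition's result straight to the driver; this helps with many partitions or large intermediate values. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize((1 to 100000).map(i => (i % 100, i.toDouble)), numSlices = 200)

// Same (sum, count) aggregation as in the aggregate sketch, merged over a 2-level tree.
val (sum, count) = pairs.treeAggregate((0.0, 0L))(
  (acc, kv) => (acc._1 + kv._2, acc._2 + 1L),
  (l, r)    => (l._1 + r._1, l._2 + r._2),
  depth = 2
)
println(s"mean=${sum / count}")
```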
- def treeReduce(f: ((K, V), (K, V)) ⇒ (K, V), depth: Int): (K, V)
  - Definition Classes: RDD
- def union(other: RDD[(K, V)]): RDD[(K, V)]
  - Definition Classes: RDD
- def unpersist(blocking: Boolean): ContextRDD.this.type
  - Definition Classes: RDD
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def withResources(rp: ResourceProfile): ContextRDD.this.type
  - Definition Classes: RDD
  - Annotations: @Experimental() @Since( "3.1.0" )
- def zip[U](other: RDD[U])(implicit arg0: ClassTag[U]): RDD[((K, V), U)]
  - Definition Classes: RDD
- def zipPartitions[B, C, D, V](rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D])(f: (Iterator[(K, V)], Iterator[B], Iterator[C], Iterator[D]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[D], arg3: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
- def zipPartitions[B, C, D, V](rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D], preservesPartitioning: Boolean)(f: (Iterator[(K, V)], Iterator[B], Iterator[C], Iterator[D]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[D], arg3: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
- def zipPartitions[B, C, V](rdd2: RDD[B], rdd3: RDD[C])(f: (Iterator[(K, V)], Iterator[B], Iterator[C]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
- def zipPartitions[B, C, V](rdd2: RDD[B], rdd3: RDD[C], preservesPartitioning: Boolean)(f: (Iterator[(K, V)], Iterator[B], Iterator[C]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
- def zipPartitions[B, V](rdd2: RDD[B])(f: (Iterator[(K, V)], Iterator[B]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
- def zipPartitions[B, V](rdd2: RDD[B], preservesPartitioning: Boolean)(f: (Iterator[(K, V)], Iterator[B]) ⇒ Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[V]): RDD[V]
  - Definition Classes: RDD
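zipPartitions pairs up the partitions of RDDs that have the same number of partitions and lets a function consume their iterators together, avoiding a shuffle when the data are already co-partitioned. A sketch assuming an active SparkContext sc:

```scala
// Both inputs must have the same number of partitions (here 2); for this element-wise
// use they should also have the same number of elements per partition.
val left  = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3), ("d", 4)), numSlices = 2)
val right = sc.parallelize(Seq(10.0, 20.0, 30.0, 40.0), numSlices = 2)

val combined = left.zipPartitions(right) { (lIter, rIter) =>
  lIter.zip(rIter).map { case ((k, v), w) => (k, v * w) }
}
combined.collect().foreach(println)
```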
- def zipWithIndex(): RDD[((K, V), Long)]
  - Definition Classes: RDD
- def zipWithUniqueId(): RDD[((K, V), Long)]
  - Definition Classes: RDD
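zipWithIndex assigns consecutive, 0-based indices in partition order (it runs an extra job to count partition sizes when there is more than one partition), while zipWithUniqueId assigns unique but non-consecutive ids without that job. A sketch assuming an active SparkContext sc:

```scala
val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)), numSlices = 2)

val indexed = pairs.zipWithIndex()      // ((a,1),0), ((b,2),1), ((c,3),2)
val uniq    = pairs.zipWithUniqueId()   // ids are unique per element but may have gaps

indexed.collect().foreach(println)
uniq.collect().foreach(println)
```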