Mirror of https://github.com/microsoft/spark.git
Make more stuff private[spark]
Parent: 87f4451f20
Commit: 6cf5dffc72
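This commit applies Scala's package-qualified access modifier to internal classes. As a minimal sketch of what private[spark] does (the Internal and SameTree names below are hypothetical, not from the commit), the definition stays public to everything under the spark package but cannot be referenced from outside it:

package spark

// Hypothetical illustration, not part of the commit: private[spark] makes
// Internal visible to all code under package spark and to nothing else.
private[spark] class Internal {
  def work(): String = "internal"
}

package rdd {
  object SameTree {
    // Compiles: spark.rdd is nested inside package spark.
    def use(): String = new Internal().work()
  }
}

From any package outside spark, new spark.Internal simply does not compile, which is the encapsulation each hunk below adds or removes.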
@@ -93,6 +93,7 @@ trait AccumulableParam[R, T] extends Serializable {
   def zero(initialValue: R): R
 }
 
+private[spark]
 class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Serializable, T]
   extends AccumulableParam[R,T] {
 
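GrowableAccumulableParam is the machinery behind accumulators built from growable collections. Callers reach it only through SparkContext (via the accumulableCollection helper, assuming the era's API keeps that name), so the param class itself never needs to be visible. A hedged sketch:

import scala.collection.mutable.ArrayBuffer
import spark.SparkContext

object AccumulableDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "AccumulableDemo")

    // accumulableCollection returns an Accumulable backed by
    // GrowableAccumulableParam; user code only ever sees the Accumulable.
    val names = sc.accumulableCollection(ArrayBuffer[String]())
    sc.parallelize(Seq("a", "b", "c")).foreach(x => names += x)
    println(names.value)  // ArrayBuffer(a, b, c); ordering may vary

    sc.stop()
  }
}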
@@ -6,7 +6,7 @@ private[spark] class BlockRDDSplit(val blockId: String, idx: Int) extends Split
   val index = idx
 }
 
-
+private[spark]
 class BlockRDD[T: ClassManifest](sc: SparkContext, @transient blockIds: Array[String])
   extends RDD[T](sc) {
 
@@ -5,6 +5,7 @@ class CartesianSplit(idx: Int, val s1: Split, val s2: Split) extends Split with
   override val index: Int = idx
 }
 
+private[spark]
 class CartesianRDD[T: ClassManifest, U:ClassManifest](
     sc: SparkContext,
     rdd1: RDD[T],
@@ -45,4 +46,4 @@ class CartesianRDD[T: ClassManifest, U:ClassManifest](
       def getParents(id: Int): Seq[Int] = List(id % numSplitsInRdd2)
     }
   )
 }
@@ -383,6 +383,7 @@ abstract class RDD[T: ClassManifest](@transient sc: SparkContext) extends Serial
   }
 }
 
+private[spark]
 class MappedRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: T => U)
@@ -393,6 +394,7 @@ class MappedRDD[U: ClassManifest, T: ClassManifest](
   override def compute(split: Split) = prev.iterator(split).map(f)
 }
 
+private[spark]
 class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: T => TraversableOnce[U])
@@ -403,18 +405,21 @@ class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
   override def compute(split: Split) = prev.iterator(split).flatMap(f)
 }
 
+private[spark]
 class FilteredRDD[T: ClassManifest](prev: RDD[T], f: T => Boolean) extends RDD[T](prev.context) {
   override def splits = prev.splits
   override val dependencies = List(new OneToOneDependency(prev))
   override def compute(split: Split) = prev.iterator(split).filter(f)
 }
 
+private[spark]
 class GlommedRDD[T: ClassManifest](prev: RDD[T]) extends RDD[Array[T]](prev.context) {
   override def splits = prev.splits
   override val dependencies = List(new OneToOneDependency(prev))
   override def compute(split: Split) = Array(prev.iterator(split).toArray).iterator
 }
 
+private[spark]
 class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: Iterator[T] => Iterator[U])
@@ -430,6 +435,7 @@ class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
  * closure. This can be used to generate or collect partition specific
  * information such as the number of tuples in a partition.
  */
+private[spark]
 class MapPartitionsWithSplitRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: (Int, Iterator[T]) => Iterator[U])
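The RDD.scala hunks above hide the concrete transformation classes (MappedRDD, FlatMappedRDD, FilteredRDD, GlommedRDD, MapPartitionsRDD, MapPartitionsWithSplitRDD). User programs never name these types: each transformation method on RDD returns the abstract RDD type, so the public API is unchanged. A sketch against the 2012-era spark package API:

import spark.SparkContext

object VisibilityDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "VisibilityDemo")
    val numbers = sc.parallelize(1 to 10)

    // map and filter still return RDD[Int]; the MappedRDD and FilteredRDD
    // instances behind them are now private[spark] implementation details.
    val doubled = numbers.map(_ * 2)
    val evens = doubled.filter(_ % 2 == 0)

    println(evens.collect().mkString(", "))
    sc.stop()
  }
}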
@@ -3,7 +3,6 @@ package spark.partial
 /**
  * A Double with error bars on it.
  */
-private[spark]
 class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, val high: Double) {
   override def toString(): String = "[%.3f, %.3f]".format(low, high)
 }
@@ -1,6 +1,6 @@
 package spark.partial
 
-private[spark] class PartialResult[R](initialVal: R, isFinal: Boolean) {
+class PartialResult[R](initialVal: R, isFinal: Boolean) {
   private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
   private var failure: Option[Exception] = None
   private var completionHandler: Option[R => Unit] = None
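The last two hunks go the opposite way: BoundedDouble and PartialResult lose their private[spark] markers. That matches their role as return types of public approximate actions. A hedged sketch, assuming the countApprox signature of the era (a timeout in milliseconds, then a confidence level):

import spark.SparkContext
import spark.partial.{BoundedDouble, PartialResult}

object ApproxDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "ApproxDemo")
    val data = sc.parallelize(1 to 100000, 10)

    // countApprox returns PartialResult[BoundedDouble], so both classes must
    // be public for callers to consume the result.
    val approx: PartialResult[BoundedDouble] = data.countApprox(500, 0.95)
    println(approx.getFinalValue())  // prints the [low, high] interval via
                                     // BoundedDouble.toString from the hunk above
    sc.stop()
  }
}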