Make dependency classes public - used by spark

Denny 2012-10-02 19:04:23 -07:00
Parent 4d9f4b01af
Commit b7a913e1fa
1 changed file with 5 additions and 5 deletions


@@ -1,23 +1,23 @@
 package spark
 
-private[spark] abstract class Dependency[T](val rdd: RDD[T], val isShuffle: Boolean) extends Serializable
+abstract class Dependency[T](val rdd: RDD[T], val isShuffle: Boolean) extends Serializable
 
-private[spark] abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd, false) {
+abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd, false) {
   def getParents(outputPartition: Int): Seq[Int]
 }
 
-private[spark] class ShuffleDependency[K, V, C](
+class ShuffleDependency[K, V, C](
     val shuffleId: Int,
     @transient rdd: RDD[(K, V)],
     val aggregator: Aggregator[K, V, C],
     val partitioner: Partitioner)
   extends Dependency(rdd, true)
 
-private[spark] class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
+class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
   override def getParents(partitionId: Int) = List(partitionId)
 }
 
-private[spark] class RangeDependency[T](rdd: RDD[T], inStart: Int, outStart: Int, length: Int)
+class RangeDependency[T](rdd: RDD[T], inStart: Int, outStart: Int, length: Int)
   extends NarrowDependency[T](rdd) {
   override def getParents(partitionId: Int) = {
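
With the private[spark] modifier dropped, these dependency classes can be referenced from code outside the spark package. As a rough illustration (not part of this commit; PruneDependency and the keep predicate are made-up names), a custom narrow dependency could now be written in user code against the API shown above:

import spark.{NarrowDependency, RDD}

// Hypothetical example: a narrow dependency that keeps only the parent
// partitions whose index passes a user-supplied predicate.
class PruneDependency[T](rdd: RDD[T], keep: Int => Boolean)
  extends NarrowDependency[T](rdd) {

  // Each output partition depends on the parent partition with the same
  // index, or on nothing if that index was pruned away.
  override def getParents(partitionId: Int): Seq[Int] =
    if (keep(partitionId)) List(partitionId) else Nil
}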