Apoorve Dave 2021-03-23 11:03:11 -07:00
Parent 40864ae09e
Commit ad399a3fd4
4 changed files: 7 additions and 6 deletions

View file

@@ -87,7 +87,7 @@ object IndexLogEntryCreator {
.Columns(index.indexedCols, index.includedCols),
IndexLogEntry.schemaString(indexSchema),
200,
Map("hasParquetAsSourceFormat" -> "true"))),
Map(IndexConstants.HAS_PARQUET_AS_SOURCE_FORMAT_PROPERTY -> "true"))),
Content(Directory.fromDirectory(indexRootPath, new FileIdTracker)),
Source(SparkPlan(sourcePlanProperties)),
Map())
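
Note: the hunk above replaces a raw string key with a named constant. A minimal sketch of the pattern, assuming IndexConstants is the project's home for index property keys (the actual declaration in Hyperspace may differ):

object IndexConstants {
  // Define the key once so callers reference the constant instead of
  // retyping the "hasParquetAsSourceFormat" literal.
  val HAS_PARQUET_AS_SOURCE_FORMAT_PROPERTY = "hasParquetAsSourceFormat"
}

// Usage mirrors the updated line above.
val properties = Map(IndexConstants.HAS_PARQUET_AS_SOURCE_FORMAT_PROPERTY -> "true")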

View file

@@ -19,6 +19,7 @@ package com.microsoft.hyperspace.goldstandard
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation, PartitioningAwareFileIndex}
+import com.microsoft.hyperspace.HyperspaceException
import com.microsoft.hyperspace.index.LogicalPlanSignatureProvider
/**
@@ -37,6 +38,7 @@ class MockSignatureProvider extends LogicalPlanSignatureProvider {
_,
_) =>
Some(location.rootPaths.head.getName)
+case _ => throw HyperspaceException("Unexpected logical plan found.")
}
}
}
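
Note: this hunk imports HyperspaceException and adds a catch-all case so MockSignatureProvider fails fast on plans it cannot fingerprint. A minimal sketch of the resulting match, assuming the trait's signature(plan) method returns Option[String] (details may differ from the actual Hyperspace trait):

def signature(plan: LogicalPlan): Option[String] = plan match {
  case LogicalRelation(
      HadoopFsRelation(location: PartitioningAwareFileIndex, _, _, _, _, _),
      _,
      _) =>
    // Use the relation's root directory name as a deterministic mock signature.
    Some(location.rootPaths.head.getName)
  case _ =>
    // Unmatched plans previously failed with an opaque MatchError;
    // now they raise a descriptive error instead.
    throw HyperspaceException("Unexpected logical plan found.")
}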

View file

@@ -99,9 +99,7 @@ trait PlanStabilitySuite extends TPCDSBase with SQLHelper with Logging {
private def isApproved(dir: File, actualSimplifiedPlan: String): Boolean = {
val file = new File(dir, "simplified.txt")
val expected = FileUtils.readFileToString(file, StandardCharsets.UTF_8)
-expected.replaceAll("\r", "").replaceAll("\n", "") == actualSimplifiedPlan
-  .replaceAll("\r", "")
-  .replaceAll("\n", "")
+expected.replaceAll("[\\r\\n]+", "") == actualSimplifiedPlan.replaceAll("[\\r\\n]+", "")
}
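
Note: both the old chained replaceAll calls and the new single regex strip every carriage return and line feed, so the comparison stays line-ending agnostic across platforms; the rewrite just does it in one pass. A quick check (not part of the commit) that the two forms agree:

val actual = "Project\r\n+- Filter\r\n"
val chained = actual.replaceAll("\r", "").replaceAll("\n", "")
val single = actual.replaceAll("[\\r\\n]+", "")
assert(chained == single) // both yield "Project+- Filter"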
/**

View file

@@ -37,10 +37,10 @@ class TPCDS_Hyperspace extends PlanStabilitySuite {
spark.conf.set(INDEX_SYSTEM_PATH, indexSystemPath)
spark.enableHyperspace()
-val indexes = Seq(
+val indexDefinitions = Seq(
"dtindex;date_dim;d_date_sk;d_year",
"ssIndex;store_sales;ss_sold_date_sk;ss_customer_sk")
-indexes.foreach(i => createIndex(IndexDefinition.fromString(i), spark))
+indexDefinitions.foreach(i => createIndex(IndexDefinition.fromString(i), spark))
}
override def afterAll(): Unit = {
@@ -71,6 +71,7 @@ object IndexDefinition {
/**
* Index definition from conf files should be provided in the following format:
* "index-name;table-name;comma-separated-indexed-cols;comma-separated-included-cols"
+*
* @param definition: Index definition in string representation mentioned above.
* @return IndexDefinition.
*/
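
Note: given that format, fromString presumably splits on the two delimiters. A hedged sketch (the field names and the actual IndexDefinition in this suite may differ):

// Hypothetical shape of the parsed definition.
case class IndexDefinition(
    name: String,
    tableName: String,
    indexedCols: Seq[String],
    includedCols: Seq[String])

object IndexDefinition {
  def fromString(definition: String): IndexDefinition = {
    // "index-name;table-name;comma-separated-indexed-cols;comma-separated-included-cols"
    val Array(name, table, indexed, included) = definition.split(";", -1)
    IndexDefinition(name, table, indexed.split(",").toSeq, included.split(",").toSeq)
  }
}

For example, IndexDefinition.fromString("dtindex;date_dim;d_date_sk;d_year") yields an index named dtindex on table date_dim, indexing d_date_sk and including d_year.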