Fix E2E FunctionsTests for Spark 3.0. (#350)

This commit is contained in:
Terry Kim 2019-11-24 22:22:21 -08:00 коммит произвёл GitHub
Родитель 310c531b87
Коммит dab4cf962f
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
3 изменённых файлов: 99 добавлений и 11 удалений

Просмотреть файл

@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.Spark.E2ETest.Utils;
@@ -156,11 +157,15 @@ namespace Microsoft.Spark.E2ETest.IpcTests
//////////////////////////////
// Window Functions
//////////////////////////////
Assert.IsType<Column>(UnboundedPreceding());
if (SparkSettings.Version < new Version(Versions.V3_0_0))
{
// The following APIs are removed in Spark 3.0.
Assert.IsType<Column>(UnboundedPreceding());
Assert.IsType<Column>(UnboundedFollowing());
Assert.IsType<Column>(UnboundedFollowing());
Assert.IsType<Column>(CurrentRow());
Assert.IsType<Column>(CurrentRow());
}
Assert.IsType<Column>(CumeDist());
@@ -534,9 +539,13 @@ namespace Microsoft.Spark.E2ETest.IpcTests
Assert.IsType<Column>(DateTrunc("mon", col));
Assert.IsType<Column>(FromUtcTimestamp(col, "GMT+1"));
if (SparkSettings.Version < new Version(Versions.V3_0_0))
{
// The following APIs are deprecated in Spark 3.0.
Assert.IsType<Column>(FromUtcTimestamp(col, "GMT+1"));
Assert.IsType<Column>(ToUtcTimestamp(col, "GMT+1"));
Assert.IsType<Column>(ToUtcTimestamp(col, "GMT+1"));
}
Assert.IsType<Column>(Window(col, "1 minute", "10 seconds", "5 seconds"));
Assert.IsType<Column>(Window(col, "1 minute", "10 seconds"));
@@ -718,9 +727,13 @@ namespace Microsoft.Spark.E2ETest.IpcTests
col = MonthsBetween(col, col, false);
col = FromUtcTimestamp(col, col);
if (SparkSettings.Version < new Version(Versions.V3_0_0))
{
// The following APIs are deprecated in Spark 3.0.
col = FromUtcTimestamp(col, col);
col = ToUtcTimestamp(col, col);
col = ToUtcTimestamp(col, col);
}
col = ArraysOverlap(col, col);

Просмотреть файл

@@ -6,24 +6,71 @@ using System;
namespace Microsoft.Spark
{
/// <summary>
/// Base class for custom attributes that involve the Spark version.
/// </summary>
public abstract class VersionAttribute : Attribute
{
/// <summary>
/// Constructor for VersionAttribute class.
/// </summary>
/// <param name="version">Spark version</param>
protected VersionAttribute(string version)
{
Version = new Version(version);
}
/// <summary>
/// Returns the Spark version.
/// </summary>
public Version Version { get; }
}
/// <summary>
/// Custom attribute to denote the Spark version in which an API is introduced.
/// </summary>
[AttributeUsage(AttributeTargets.All)]
public sealed class SinceAttribute : Attribute
public sealed class SinceAttribute : VersionAttribute
{
/// <summary>
/// Constructor for SinceAttribute class.
/// </summary>
/// <param name="version">Spark version</param>
public SinceAttribute(string version)
: base(version)
{
Version = version;
}
}
/// <summary>
/// Custom attribute to denote the Spark version in which an API is removed.
/// </summary>
[AttributeUsage(AttributeTargets.All)]
public sealed class RemovedAttribute : VersionAttribute
{
/// <summary>
/// Returns the Spark version.
/// Constructor for RemovedAttribute class.
/// </summary>
public string Version { get; }
/// <param name="version">Spark version</param>
public RemovedAttribute(string version)
: base(version)
{
}
}
/// <summary>
/// Custom attribute to denote the Spark version in which an API is deprecated.
/// </summary>
[AttributeUsage(AttributeTargets.All)]
public sealed class DeprecatedAttribute : VersionAttribute
{
/// <summary>
/// Constructor for DeprecatedAttribute class.
/// </summary>
/// <param name="version">Spark version</param>
public DeprecatedAttribute(string version)
: base(version)
{
}
}
}

Просмотреть файл

@@ -718,7 +718,11 @@ namespace Microsoft.Spark.Sql
/// Window function: returns the special frame boundary that represents the first
/// row in the window partition.
/// </summary>
/// <remarks>
/// This API is removed in Spark 3.0.
/// </remarks>
/// <returns>Column object</returns>
[Removed(Versions.V3_0_0)]
public static Column UnboundedPreceding()
{
return ApplyFunction("unboundedPreceding");
@@ -728,7 +732,11 @@ namespace Microsoft.Spark.Sql
/// Window function: returns the special frame boundary that represents the last
/// row in the window partition.
/// </summary>
/// <remarks>
/// This API is removed in Spark 3.0.
/// </remarks>
/// <returns>Column object</returns>
[Removed(Versions.V3_0_0)]
public static Column UnboundedFollowing()
{
return ApplyFunction("unboundedFollowing");
@@ -738,7 +746,11 @@ namespace Microsoft.Spark.Sql
/// Window function: returns the special frame boundary that represents the current
/// row in the window partition.
/// </summary>
/// <remarks>
/// This API is removed in Spark 3.0.
/// </remarks>
/// <returns>Column object</returns>
[Removed(Versions.V3_0_0)]
public static Column CurrentRow()
{
return ApplyFunction("currentRow");
@@ -2957,9 +2969,13 @@ namespace Microsoft.Spark.Sql
/// and renders that time as a timestamp in the given time zone. For example, 'GMT+1'
/// would yield '2017-07-14 03:40:00.0'.
/// </summary>
/// <remarks>
/// This API is deprecated in Spark 3.0.
/// </remarks>
/// <param name="column">Column to apply</param>
/// <param name="tz">Timezone string</param>
/// <returns>Column object</returns>
[Deprecated(Versions.V3_0_0)]
public static Column FromUtcTimestamp(Column column, string tz)
{
return ApplyFunction("from_utc_timestamp", column, tz);
@@ -2970,10 +2986,14 @@ namespace Microsoft.Spark.Sql
/// and renders that time as a timestamp in the given time zone. For example, 'GMT+1'
/// would yield '2017-07-14 03:40:00.0'.
/// </summary>
/// <remarks>
/// This API is deprecated in Spark 3.0.
/// </remarks>
/// <param name="column">Column to apply</param>
/// <param name="tz">Timezone expression</param>
/// <returns>Column object</returns>
[Since(Versions.V2_4_0)]
[Deprecated(Versions.V3_0_0)]
public static Column FromUtcTimestamp(Column column, Column tz)
{
return ApplyFunction("from_utc_timestamp", column, tz);
@@ -2984,9 +3004,13 @@ namespace Microsoft.Spark.Sql
/// given time zone, and renders that time as a timestamp in UTC. For example, 'GMT+1'
/// would yield '2017-07-14 01:40:00.0'.
/// </summary>
/// <remarks>
/// This API is deprecated in Spark 3.0.
/// </remarks>
/// <param name="column">Column to apply</param>
/// <param name="tz">Timezone string</param>
/// <returns>Column object</returns>
[Deprecated(Versions.V3_0_0)]
public static Column ToUtcTimestamp(Column column, string tz)
{
return ApplyFunction("to_utc_timestamp", column, tz);
@@ -2997,10 +3021,14 @@ namespace Microsoft.Spark.Sql
/// given time zone, and renders that time as a timestamp in UTC. For example, 'GMT+1'
/// would yield '2017-07-14 01:40:00.0'.
/// </summary>
/// <remarks>
/// This API is deprecated in Spark 3.0.
/// </remarks>
/// <param name="column">Column to apply</param>
/// <param name="tz">Timezone expression</param>
/// <returns>Column object</returns>
[Since(Versions.V2_4_0)]
[Deprecated(Versions.V3_0_0)]
public static Column ToUtcTimestamp(Column column, Column tz)
{
return ApplyFunction("to_utc_timestamp", column, tz);