Signed-off-by: Derrick Stolee <dstolee@microsoft.com>
This commit is contained in:
Derrick Stolee 2019-12-17 07:00:00 -05:00
Parent e8f0a0fd10
Commit 47b898d0ca
3 changed files with 0 additions and 347 deletions

View file

@ -1,124 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
namespace Scalar.Common.NetworkStreams
{
/// <summary>
/// Deserializer for packs and indexes for prefetch packs.
/// </summary>
/// <summary>
/// Deserializer for packs and indexes for prefetch packs.
///
/// Wire format (as exercised by PrefetchPacksDeserializerTests):
///   - 6-byte magic header: "GPRE ", version 1
///   - ushort pack count
///   - per pack: three little-endian longs (timestamp, pack length, index length)
///     followed by the pack bytes and, when index length &gt; 0, the index bytes.
/// </summary>
public class PrefetchPacksDeserializer
{
    // Per-pack header: timestamp + packLength + indexLength, each sizeof(long).
    private const int NumPackHeaderBytes = 3 * sizeof(long);

    private static readonly byte[] PrefetchPackExpectedHeader =
        new byte[]
        {
            (byte)'G', (byte)'P', (byte)'R', (byte)'E', (byte)' ', // Magic
            1 // Version
        };

    private readonly Stream source;

    public PrefetchPacksDeserializer(Stream source)
    {
        this.source = source;
    }

    /// <summary>
    /// Read all the packs and indexes from the source stream and return a <see cref="PackAndIndex"/> for each pack
    /// and index. Caller must consume pack stream fully before the index stream,
    /// because both are restricted windows over the same underlying source stream.
    /// </summary>
    public IEnumerable<PackAndIndex> EnumeratePacks()
    {
        this.ValidateHeader();

        // Reused for both the pack count and every pack header read.
        byte[] buffer = new byte[NumPackHeaderBytes];

        int packCount = this.ReadPackCount(buffer);

        for (int i = 0; i < packCount; i++)
        {
            long timestamp;
            long packLength;
            long indexLength;
            this.ReadPackHeader(buffer, out timestamp, out packLength, out indexLength);

            // A non-positive index length means the server sent no index for this pack.
            using (Stream packData = new RestrictedStream(this.source, packLength))
            using (Stream indexData = indexLength > 0 ? new RestrictedStream(this.source, indexLength) : null)
            {
                yield return new PackAndIndex(packData, indexData, timestamp);
            }
        }
    }

    /// <summary>
    /// Read the ushort pack count.
    /// </summary>
    private ushort ReadPackCount(byte[] buffer)
    {
        // Fix: the byte count returned by TryReadGreedy was previously ignored,
        // so a stream truncated inside the 2-byte count silently produced a
        // garbage value from stale buffer contents. Validate like ReadPackHeader.
        int totalBytes = StreamUtil.TryReadGreedy(this.source, buffer, 0, 2);
        if (totalBytes != 2)
        {
            throw new RetryableException(
                string.Format(
                    "Reached end of stream before expected {0} bytes. Got {1}. Buffer: {2}",
                    2,
                    totalBytes,
                    SHA1Util.HexStringFromBytes(buffer)));
        }

        return BitConverter.ToUInt16(buffer, 0);
    }

    /// <summary>
    /// Parse the current pack header: three consecutive little-endian longs.
    /// Throws a RetryableException (the transfer can simply be re-requested)
    /// when the stream ends mid-header.
    /// </summary>
    private void ReadPackHeader(
        byte[] buffer,
        out long timestamp,
        out long packLength,
        out long indexLength)
    {
        int totalBytes = StreamUtil.TryReadGreedy(
            this.source,
            buffer,
            0,
            NumPackHeaderBytes);
        if (totalBytes == NumPackHeaderBytes)
        {
            timestamp = BitConverter.ToInt64(buffer, 0);
            packLength = BitConverter.ToInt64(buffer, 8);
            indexLength = BitConverter.ToInt64(buffer, 16);
        }
        else
        {
            throw new RetryableException(
                string.Format(
                    "Reached end of stream before expected {0} bytes. Got {1}. Buffer: {2}",
                    NumPackHeaderBytes,
                    totalBytes,
                    SHA1Util.HexStringFromBytes(buffer)));
        }
    }

    /// <summary>
    /// Consume and verify the magic "GPRE " + version header. A short read
    /// leaves the buffer partially unset and therefore also fails the
    /// SequenceEqual comparison.
    /// </summary>
    private void ValidateHeader()
    {
        byte[] headerBuf = new byte[PrefetchPackExpectedHeader.Length];
        StreamUtil.TryReadGreedy(this.source, headerBuf, 0, headerBuf.Length);
        if (!headerBuf.SequenceEqual(PrefetchPackExpectedHeader))
        {
            throw new InvalidDataException("Unexpected header: " + Encoding.UTF8.GetString(headerBuf));
        }
    }

    /// <summary>
    /// One pack plus its (optional) index, exposed as streams over the shared
    /// source stream.
    /// </summary>
    public class PackAndIndex
    {
        public PackAndIndex(Stream packStream, Stream idxStream, long timestamp)
        {
            this.PackStream = packStream;
            this.IndexStream = idxStream;
            this.Timestamp = timestamp;
            this.UniqueId = Guid.NewGuid().ToString("N");
        }

        /// <summary>Pack data; must be fully consumed before <see cref="IndexStream"/>.</summary>
        public Stream PackStream { get; }

        /// <summary>Index data, or null when the server sent no index.</summary>
        public Stream IndexStream { get; }

        /// <summary>Server-provided timestamp for this pack.</summary>
        public long Timestamp { get; }

        /// <summary>Random identifier, unique per deserialized pack.</summary>
        public string UniqueId { get; }
    }
}
}

View file

@ -1,51 +1,9 @@
using System;
using System.IO;
using System.Linq;
namespace Scalar.Common
{
public static class Paths
{
/// <summary>
/// Walks upward from <paramref name="startingDirectory"/> looking for the first
/// ancestor (including the starting directory itself) that contains exactly one
/// subdirectory named <paramref name="rootName"/>.
/// </summary>
/// <returns>The full path of that ancestor, or null if none is found.</returns>
public static string GetRoot(string startingDirectory, string rootName)
{
    startingDirectory = startingDirectory.TrimEnd(Path.DirectorySeparatorChar);
    DirectoryInfo dirInfo;

    try
    {
        dirInfo = new DirectoryInfo(startingDirectory);
    }
    catch (Exception)
    {
        // Invalid path (bad characters, empty, too long, ...) — treat as "not found".
        return null;
    }

    while (dirInfo != null)
    {
        if (dirInfo.Exists)
        {
            DirectoryInfo[] dotScalarDirs = new DirectoryInfo[0];

            try
            {
                dotScalarDirs = dirInfo.GetDirectories(rootName);
            }
            catch (IOException)
            {
                // Unreadable directory: keep walking upward with an empty result.
            }
            catch (UnauthorizedAccessException)
            {
                // Fix: GetDirectories is documented to throw UnauthorizedAccessException
                // on permission failures; previously this escaped and aborted the
                // search instead of continuing upward like the IOException case.
            }

            // Fix: use Length on the array rather than LINQ Count().
            if (dotScalarDirs.Length == 1)
            {
                return dirInfo.FullName;
            }
        }

        dirInfo = dirInfo.Parent;
    }

    return null;
}
public static string ConvertPathToGitFormat(string path)
{
return path.Replace(Path.DirectorySeparatorChar, ScalarConstants.GitPathSeparator);

View file

@ -1,181 +0,0 @@
using NUnit.Framework;
using Scalar.Common.NetworkStreams;
using Scalar.Tests.Should;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Scalar.UnitTests.Prefetch
{
[TestFixture]
// Tests for PrefetchPacksDeserializer: writes a synthetic prefetch-pack stream
// per the wire spec, then verifies the deserializer reproduces each pack (and
// optional index) byte-for-byte with matching timestamps and unique IDs.
public class PrefetchPacksDeserializerTests
{
// Must stay byte-identical to PrefetchPacksDeserializer.PrefetchPackExpectedHeader:
// 5-byte magic "GPRE " followed by a 1-byte version.
private static readonly byte[] PrefetchPackExpectedHeader
= new byte[]
{
(byte)'G', (byte)'P', (byte)'R', (byte)'E', (byte)' ',
1 // Version
};
[TestCase]
public void PrefetchPacksDeserializer_No_Packs_Succeeds()
{
this.RunPrefetchPacksDeserializerTest(0, false);
}
[TestCase]
public void PrefetchPacksDeserializer_Single_Pack_With_Index_Receives_Both()
{
this.RunPrefetchPacksDeserializerTest(1, true);
}
[TestCase]
public void PrefetchPacksDeserializer_Single_Pack_Without_Index_Receives_Only_Pack()
{
this.RunPrefetchPacksDeserializerTest(1, false);
}
[TestCase]
public void PrefetchPacksDeserializer_Multiple_Packs_With_Indexes()
{
this.RunPrefetchPacksDeserializerTest(10, true);
}
[TestCase]
public void PrefetchPacksDeserializer_Multiple_Packs_Without_Indexes()
{
this.RunPrefetchPacksDeserializerTest(10, false);
}
/// <summary>
/// A deterministic way to create somewhat unique packs:
/// 100 pseudo-random bytes seeded by the timestamp, so the same
/// timestamp always regenerates the same payload for comparison.
/// </summary>
private static byte[] PackForTimestamp(long timestamp)
{
// unchecked: the long-to-int seed cast is allowed to overflow silently.
unchecked
{
Random rand = new Random((int)timestamp);
byte[] data = new byte[100];
rand.NextBytes(data);
return data;
}
}
/// <summary>
/// A deterministic way to create somewhat unique indexes:
/// 50 bytes seeded by the negated timestamp so index content differs
/// from the pack content for the same timestamp.
/// </summary>
private static byte[] IndexForTimestamp(long timestamp)
{
unchecked
{
Random rand = new Random((int)-timestamp);
byte[] data = new byte[50];
rand.NextBytes(data);
return data;
}
}
/// <summary>
/// Implementation of the PrefetchPack spec to generate data for tests.
/// Field order and widths must mirror PrefetchPacksDeserializer exactly:
/// magic header, ushort pack count, then per pack three 8-byte longs
/// (timestamp, pack length, index length) followed by the raw bytes.
/// </summary>
private void WriteToSpecs(Stream stream, long[] packTimestamps, bool withIndexes)
{
// Header
stream.Write(PrefetchPackExpectedHeader, 0, PrefetchPackExpectedHeader.Length);
// PackCount
stream.Write(BitConverter.GetBytes((ushort)packTimestamps.Length), 0, 2);
for (int i = 0; i < packTimestamps.Length; i++)
{
byte[] packContents = PackForTimestamp(packTimestamps[i]);
byte[] indexContents = IndexForTimestamp(packTimestamps[i]);
// Pack Header
// Timestamp
stream.Write(BitConverter.GetBytes(packTimestamps[i]), 0, 8);
// Pack length
stream.Write(BitConverter.GetBytes((long)packContents.Length), 0, 8);
// Pack index length: -1 signals "no index follows" to the deserializer.
if (withIndexes)
{
stream.Write(BitConverter.GetBytes((long)indexContents.Length), 0, 8);
}
else
{
stream.Write(BitConverter.GetBytes(-1L), 0, 8);
}
// Pack data
stream.Write(packContents, 0, packContents.Length);
if (withIndexes)
{
stream.Write(indexContents, 0, indexContents.Length);
}
}
}
// Round-trips packCount packs through the deserializer and checks that every
// pack/index stream matches its regenerated expected content, grouped by
// the per-pack UniqueId.
private void RunPrefetchPacksDeserializerTest(int packCount, bool withIndexes)
{
using (MemoryStream ms = new MemoryStream())
{
// Timestamps 0..packCount-1 double as deterministic content seeds.
long[] packTimestamps = Enumerable.Range(0, packCount).Select(x => (long)x).ToArray();
// Write the data to the memory stream.
this.WriteToSpecs(ms, packTimestamps, withIndexes);
ms.Position = 0;
// UniqueId -> list of ("pack"|"idx", timestamp) records observed.
Dictionary<string, List<Tuple<string, long>>> receivedPacksAndIndexes = new Dictionary<string, List<Tuple<string, long>>>();
foreach (PrefetchPacksDeserializer.PackAndIndex pack in new PrefetchPacksDeserializer(ms).EnumeratePacks())
{
List<Tuple<string, long>> packsAndIndexesByUniqueId;
if (!receivedPacksAndIndexes.TryGetValue(pack.UniqueId, out packsAndIndexesByUniqueId))
{
packsAndIndexesByUniqueId = new List<Tuple<string, long>>();
receivedPacksAndIndexes.Add(pack.UniqueId, packsAndIndexesByUniqueId);
}
using (MemoryStream packContent = new MemoryStream())
using (MemoryStream idxContent = new MemoryStream())
{
// Pack stream must be consumed before the index stream (spec requirement).
pack.PackStream.CopyTo(packContent);
byte[] packData = packContent.ToArray();
packData.ShouldMatchInOrder(PackForTimestamp(pack.Timestamp));
packsAndIndexesByUniqueId.Add(Tuple.Create("pack", pack.Timestamp));
// IndexStream is null when the writer emitted -1 for the index length.
if (pack.IndexStream != null)
{
pack.IndexStream.CopyTo(idxContent);
byte[] idxData = idxContent.ToArray();
idxData.ShouldMatchInOrder(IndexForTimestamp(pack.Timestamp));
packsAndIndexesByUniqueId.Add(Tuple.Create("idx", pack.Timestamp));
}
}
}
receivedPacksAndIndexes.Count.ShouldEqual(packCount, "UniqueId count");
foreach (List<Tuple<string, long>> groupedByUniqueId in receivedPacksAndIndexes.Values)
{
if (withIndexes)
{
groupedByUniqueId.Count.ShouldEqual(2, "Both Pack and Index for UniqueId");
// Should only contain 1 index file
groupedByUniqueId.ShouldContainSingle(x => x.Item1 == "idx");
}
// should only contain 1 pack file
groupedByUniqueId.ShouldContainSingle(x => x.Item1 == "pack");
groupedByUniqueId.Select(x => x.Item2).Distinct().Count().ShouldEqual(1, "Same timestamps for a uniqueId");
}
}
}
}
}