Implement incremental hashing that works on CoreCLR as well as Desktop
tmat committed Apr 27, 2016
1 parent e9488f3 commit 38db50e
Showing 9 changed files with 317 additions and 103 deletions.
5 changes: 3 additions & 2 deletions src/Compilers/Core/Portable/CodeAnalysis.csproj
@@ -51,6 +51,7 @@
</Content>
</ItemGroup>
<ItemGroup>
<Compile Include="InternalUtilities\IncrementalHash.cs" />
<Compile Include="System\Reflection\Blob.cs" />
<Compile Include="System\Reflection\BlobWriter.cs" />
<Compile Include="System\Reflection\BlobWriterImpl.cs" />
@@ -110,7 +111,7 @@
<Compile Include="DiagnosticAnalyzer\SuppressMessageInfo.cs" />
<Compile Include="Diagnostic\SuppressionInfo.cs" />
<Compile Include="InternalUtilities\JsonWriter.cs" />
<Compile Include="InternalUtilities\HashAlgorithmExtensions.cs" />
<Compile Include="InternalUtilities\IncrementalHashExtensions.cs" />
<Compile Include="InternalUtilities\SetWithInsertionOrder.cs" />
<Compile Include="InternalUtilities\StackGuard.cs" />
<Compile Include="InternalUtilities\StreamExtensions.cs" />
@@ -819,4 +820,4 @@
<ImportGroup Label="Targets">
<Import Project="..\..\..\..\build\Targets\VSL.Imports.targets" />
</ImportGroup>
</Project>
</Project>
5 changes: 3 additions & 2 deletions src/Compilers/Core/Portable/CryptographicHashProvider.cs
@@ -169,9 +169,10 @@ internal static ImmutableArray<byte> ComputeSha1(byte[] bytes)

internal static ImmutableArray<byte> ComputeSha1(BlobBuilder bytes)
{
using (var hashProvider = new SHA1CryptoServiceProvider())
using (var incrementalHash = IncrementalHash.Create(AssemblyHashAlgorithm.Sha1))
{
return ImmutableArray.Create(hashProvider.ComputeHash(bytes));
incrementalHash.AppendData(bytes);
return ImmutableArray.Create(incrementalHash.GetHashAndReset());
}
}
}

src/Compilers/Core/Portable/InternalUtilities/HashAlgorithmExtensions.cs
This file was deleted.

38 changes: 0 additions & 38 deletions src/Compilers/Core/Portable/InternalUtilities/HashAlgorithms.cs
@@ -12,18 +12,6 @@ namespace Roslyn.Utilities

internal abstract class HashAlgorithm : IDisposable
{
private static readonly MethodInfo s_transformBlock = PortableShim.HashAlgorithm.Type
.GetTypeInfo()
.GetDeclaredMethod(nameof(TransformBlock), new[] { typeof(byte[]), typeof(int), typeof(int), typeof(byte[]), typeof(int) });

private static readonly MethodInfo s_transformFinalBlock = PortableShim.HashAlgorithm.Type
.GetTypeInfo()
.GetDeclaredMethod(nameof(TransformFinalBlock), new[] { typeof(byte[]), typeof(int), typeof(int) });

private static readonly PropertyInfo s_hash = PortableShim.HashAlgorithm.Type
.GetTypeInfo()
.GetDeclaredProperty(nameof(Hash));

private readonly IDisposable _hashInstance;

protected HashAlgorithm(IDisposable hashInstance)
@@ -46,32 +34,6 @@ public byte[] ComputeHash(Stream inputStream)
return PortableShim.HashAlgorithm.ComputeHash(_hashInstance, inputStream);
}

public bool SupportsTransform =>
s_transformBlock != null &&
s_transformFinalBlock != null &&
s_hash != null;

/// <summary>
/// Invoke the underlying HashAlgorithm's TransformBlock operation on the provided data.
/// </summary>
public void TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
while (inputCount > 0)
{
int written = (int)s_transformBlock.Invoke(_hashInstance, new object[] { inputBuffer, inputOffset, inputCount, inputBuffer, inputOffset });
Debug.Assert(inputCount == written); // does the TransformBlock method always consume the complete data given to it?
inputCount -= written;
inputOffset += written;
}
}

public void TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
s_transformFinalBlock.Invoke(_hashInstance, new object[] { inputBuffer, inputOffset, inputCount });
}

public byte[] Hash => (byte[])s_hash.GetMethod.Invoke(_hashInstance, new object[] { });

public void Dispose()
{
_hashInstance.Dispose();
151 changes: 151 additions & 0 deletions src/Compilers/Core/Portable/InternalUtilities/IncrementalHash.cs
@@ -0,0 +1,151 @@
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Diagnostics;
using System.Reflection;
using Microsoft.CodeAnalysis;

namespace Roslyn.Utilities
{
/// <summary>
/// Implementation of System.Security.Cryptography.IncrementalHash that works across Desktop and CoreCLR.
/// Remove once we can use the real one from netstandard1.3.
/// </summary>
internal abstract class IncrementalHash : IDisposable
{
/// <summary>
/// Append the entire contents of <paramref name="data"/> to the data already processed in the hash or HMAC.
/// </summary>
/// <param name="data">The data to process.</param>
public void AppendData(byte[] data) => AppendData(data, 0, data.Length);

/// <summary>
/// Append <paramref name="count"/> bytes of <paramref name="data"/>, starting at <paramref name="offset"/>,
/// to the data already processed in the hash.
/// </summary>
/// <param name="data">The data to process.</param>
/// <param name="offset">The offset into the byte array from which to begin using data.</param>
/// <param name="count">The number of bytes in the array to use as data.</param>
public abstract void AppendData(byte[] data, int offset, int count);

/// <summary>
/// Retrieve the hash for the data accumulated from prior calls to
/// <see cref="AppendData(byte[])"/>, and return to the state the object
/// was in at construction.
/// </summary>
public abstract byte[] GetHashAndReset();

public abstract void Dispose();

public static IncrementalHash Create(AssemblyHashAlgorithm hashAlgorithm)
{
if (PortableShim.IncrementalHash.TypeOpt != null)
{
return new Core(hashAlgorithm);
}
else
{
return new Desktop(hashAlgorithm);
}
}

/// <summary>
/// CoreCLR implementation.
/// </summary>
private sealed class Core : IncrementalHash
{
// IncrementalHash
private readonly IDisposable _incrementalHashImpl;

internal Core(AssemblyHashAlgorithm hashAlgorithm)
{
var name = GetHashAlgorithmNameObj(hashAlgorithm);
_incrementalHashImpl = PortableShim.IncrementalHash.CreateHash(name);
}

/// <summary>
/// Returns the actual FX implementation of HashAlgorithmName for the given hash algorithm id.
/// </summary>
private static object GetHashAlgorithmNameObj(AssemblyHashAlgorithm algorithmId)
{
switch (algorithmId)
{
case AssemblyHashAlgorithm.Sha1:
return PortableShim.HashAlgorithmName.SHA1;

default:
// More algorithms can be added as needed.
throw ExceptionUtilities.UnexpectedValue(algorithmId);
}
}

public override void AppendData(byte[] data, int offset, int count) => PortableShim.IncrementalHash.AppendData(_incrementalHashImpl, data, offset, count);
public override byte[] GetHashAndReset() => PortableShim.IncrementalHash.GetHashAndReset(_incrementalHashImpl);
public override void Dispose() => _incrementalHashImpl.Dispose();
}

/// <summary>
/// Desktop implementation.
/// </summary>
private sealed class Desktop : IncrementalHash
{
// HashAlgorithm
private readonly IDisposable _hashAlgorithmImpl;

internal Desktop(AssemblyHashAlgorithm hashAlgorithm)
{
_hashAlgorithmImpl = GetAlgorithmImpl(hashAlgorithm);
}

/// <summary>
/// Returns the actual FX implementation of HashAlgorithm.
/// </summary>
private static IDisposable GetAlgorithmImpl(AssemblyHashAlgorithm algorithmId)
{
switch (algorithmId)
{
case AssemblyHashAlgorithm.None:
case AssemblyHashAlgorithm.Sha1:
return PortableShim.SHA1.Create();

case AssemblyHashAlgorithm.Sha256:
return PortableShim.SHA256.Create();

case AssemblyHashAlgorithm.Sha384:
return PortableShim.SHA384.Create();

case AssemblyHashAlgorithm.Sha512:
return PortableShim.SHA512.Create();

case AssemblyHashAlgorithm.MD5:
return PortableShim.MD5.Create();

default:
throw ExceptionUtilities.UnexpectedValue(algorithmId);
}
}

public override void AppendData(byte[] data, int offset, int count)
{
while (count > 0)
{
int written = PortableShim.HashAlgorithm.TransformBlock(_hashAlgorithmImpl, data, offset, count, data, offset);
Debug.Assert(count == written); // does the TransformBlock method always consume the complete data given to it?
count -= written;
offset += written;
}
}

public override byte[] GetHashAndReset()
{
PortableShim.HashAlgorithm.TransformFinalBlock(_hashAlgorithmImpl, SpecializedCollections.EmptyBytes, 0, 0);
return PortableShim.HashAlgorithm.Hash(_hashAlgorithmImpl);
}

public override void Dispose()
{
_hashAlgorithmImpl.Dispose();
}
}
}
}
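
For reference, a minimal usage sketch of the new type, mirroring the CryptographicHashProvider change above; the wrapper method name below is hypothetical and not part of this commit.

internal static ImmutableArray<byte> ComputeSha1Hash(byte[] bytes)
{
    // Create a SHA-1 incremental hash (the Core or Desktop implementation is chosen at runtime),
    // append the whole buffer, then retrieve the digest and reset the internal state.
    using (var incrementalHash = IncrementalHash.Create(AssemblyHashAlgorithm.Sha1))
    {
        incrementalHash.AppendData(bytes);
        return ImmutableArray.Create(incrementalHash.GetHashAndReset());
    }
}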
23 changes: 23 additions & 0 deletions src/Compilers/Core/Portable/InternalUtilities/IncrementalHashExtensions.cs
@@ -0,0 +1,23 @@
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using Roslyn.Reflection;

namespace Roslyn.Utilities
{
internal static class IncrementalHashExtensions
{
internal static void AppendData(this IncrementalHash hash, BlobBuilder builder)
{
foreach (var blob in builder.GetBlobs())
{
hash.AppendData(blob.GetBytes());
}
}

internal static void AppendData(this IncrementalHash hash, ArraySegment<byte> segment)
{
hash.AppendData(segment.Array, segment.Offset, segment.Count);
}
}
}
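
A brief usage sketch for these extensions, following the same append-then-reset pattern the PdbLogger change below adopts; the builder contents here are purely illustrative.

// Hash the accumulated chunks of a BlobBuilder without materializing one large array.
var builder = new BlobBuilder();
builder.WriteBytes(new byte[] { 0x01, 0x02, 0x03 });

using (var hash = IncrementalHash.Create(AssemblyHashAlgorithm.Sha1))
{
    // The extension walks builder.GetBlobs() and appends each segment.
    hash.AppendData(builder);
    byte[] digest = hash.GetHashAndReset(); // SHA-1 of all bytes written to the builder
}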
src/Compilers/Core/Portable/InternalUtilities/ReflectionUtilities.cs
@@ -10,6 +10,8 @@ namespace Roslyn.Utilities
{
internal static class ReflectionUtilities
{
private static readonly Type Missing = typeof(void);

public static Type TryGetType(string assemblyQualifiedName)
{
try
@@ -23,6 +25,16 @@ public static Type TryGetType(string assemblyQualifiedName)
}
}

public static Type TryGetType(ref Type lazyType, string assemblyQualifiedName)
{
if (lazyType == null)
{
lazyType = TryGetType(assemblyQualifiedName) ?? Missing;
}

return (lazyType == Missing) ? null : lazyType;
}

/// <summary>
/// Find a <see cref="Type"/> instance by first probing the contract name and then the name as it
/// would exist in mscorlib. This helps satisfy both the CoreCLR and Desktop scenarios.
@@ -39,6 +51,16 @@ public static Type GetTypeFromEither(string contractName, string desktopName)
return type;
}

public static Type GetTypeFromEither(ref Type lazyType, string contractName, string desktopName)
{
if (lazyType == null)
{
lazyType = GetTypeFromEither(contractName, desktopName) ?? Missing;
}

return (lazyType == Missing) ? null : lazyType;
}

public static T FindItem<T>(IEnumerable<T> collection, params Type[] paramTypes)
where T : MethodBase
{
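For context, a hedged sketch of how a shim might consume the new lazy overloads added above; the member names and the assembly-qualified name below are illustrative rather than taken from this commit.

// The reflected type is cached in a static field; ReflectionUtilities stores typeof(void)
// there as a "not found" sentinel and translates it back to null on every call.
private static Type s_lazyIncrementalHashType;

internal static Type IncrementalHashTypeOpt => ReflectionUtilities.TryGetType(
    ref s_lazyIncrementalHashType,
    "System.Security.Cryptography.IncrementalHash, System.Security.Cryptography.Algorithms");
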
21 changes: 8 additions & 13 deletions src/Compilers/Core/Portable/NativePdbWriter/PdbWriter.cs
@@ -55,7 +55,7 @@ internal sealed class PdbLogger
private readonly bool _logging;
private readonly BlobBuilder _logData;
private const int bufferFlushLimit = 64 * 1024;
private readonly HashAlgorithm _hashAlgorithm;
private readonly IncrementalHash _incrementalHash;

internal PdbLogger(bool logging)
{
@@ -67,13 +67,12 @@ internal PdbLogger(bool logging)
// and we need just one per compile session
// pooling will be counter-productive in such a scenario
_logData = new BlobBuilder(bufferFlushLimit);
_hashAlgorithm = new SHA1CryptoServiceProvider();
Debug.Assert(_hashAlgorithm.SupportsTransform);
_incrementalHash = IncrementalHash.Create(AssemblyHashAlgorithm.Sha1);
}
else
{
_logData = null;
_hashAlgorithm = null;
_incrementalHash = null;
}
}

@@ -83,12 +82,7 @@ private void EnsureSpace(int space)
// that should be very rare though.
if (_logData.Count + space >= bufferFlushLimit)
{
foreach (var blob in _logData.GetBlobs())
{
var segment = blob.GetBytes();
_hashAlgorithm.TransformBlock(segment.Array, segment.Offset, segment.Count);
}

_incrementalHash.AppendData(_logData);
_logData.Clear();
}
}
@@ -97,14 +91,15 @@ internal byte[] GetLogHash()
{
Debug.Assert(_logData != null);

var hash = _hashAlgorithm.ComputeHash(_logData);
_incrementalHash.AppendData(_logData);
_logData.Clear();
return hash;

return _incrementalHash.GetHashAndReset();
}

internal void Close()
{
_hashAlgorithm?.Dispose();
_incrementalHash?.Dispose();
}

internal enum PdbWriterOperation : byte