Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ML.Net and Tensorflow integration demo. #3

Open
wants to merge 19 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
19 commits
Select a commit. Hold shift + click to select a range.
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 3 additions & 4 deletions Microsoft.ML.sln
Original file line number Diff line number Diff line change
Expand Up @@ -103,8 +103,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.CpuMath.UnitTe
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.CpuMath.UnitTests.netcoreapp", "test\Microsoft.ML.CpuMath.UnitTests.netcoreapp\Microsoft.ML.CpuMath.UnitTests.netcoreapp.csproj", "{5F81A2A4-73AD-494C-B387-07D605EC8826}"
EndProject

Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Microsoft.ML.FSharp.Tests", "test\Microsoft.ML.FSharp.Tests\Microsoft.ML.FSharp.Tests.fsproj", "{802233D6-8CC0-46AD-9F23-FEE1E9AED9B3}"
Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "Microsoft.ML.FSharp.Tests", "test\Microsoft.ML.FSharp.Tests\Microsoft.ML.FSharp.Tests.fsproj", "{802233D6-8CC0-46AD-9F23-FEE1E9AED9B3}"

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705 [](start = 10, length = 36)

Why did this change?

EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.ImageAnalytics", "src\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.csproj", "{00E38F77-1E61-4CDF-8F97-1417D4E85053}"
EndProject
Expand Down Expand Up @@ -426,11 +425,11 @@ Global
{001F3B4E-FBE4-4001-AFD2-A6A989CD1C25} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{DCF46B79-1FDB-4DBA-A263-D3D64E3AAA27} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{BF66A305-DF10-47E4-8D81-42049B149D2B} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{B4E55B2D-2A92-46E7-B72F-E76D6FD83440} = {7F13E156-3EBA-4021-84A5-CD56BA72F99E}
{3E4ABF07-7970-4BE6-B45B-A13D3C397545} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{7333EDEF-4144-405C-A5EC-6F42201857D8} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{A0E562A9-0E6D-470D-B180-6EB44BA84D60} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{5F81A2A4-73AD-494C-B387-07D605EC8826} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{B4E55B2D-2A92-46E7-B72F-E76D6FD83440} = {7F13E156-3EBA-4021-84A5-CD56BA72F99E}
{3E4ABF07-7970-4BE6-B45B-A13D3C397545} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{802233D6-8CC0-46AD-9F23-FEE1E9AED9B3} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{00E38F77-1E61-4CDF-8F97-1417D4E85053} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{A7222F41-1CF0-47D9-B80C-B4D77B027A61} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
Expand Down
19 changes: 18 additions & 1 deletion src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<IncludeInPackage>Microsoft.ML</IncludeInPackage>
<DefineConstants>CORECLR</DefineConstants>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>

<ItemGroup>
Expand Down Expand Up @@ -56,6 +57,11 @@
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
<Compile Update="TensorFlow\TensorGeneric.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>TensorGeneric.tt</DependentUpon>
</Compile>
</ItemGroup>

<ItemGroup>
Expand All @@ -65,4 +71,15 @@
</EmbeddedResource>
</ItemGroup>

<ItemGroup>
<None Update="TensorFlow\TensorGeneric.tt">
<Generator>TextTemplatingFileGenerator</Generator>
<LastGenOutput>TensorGeneric.cs</LastGenOutput>
</None>
</ItemGroup>

<ItemGroup>
<Service Include="{508349b6-6b84-4df5-91f0-309beebad82d}" />
</ItemGroup>

</Project>
211 changes: 211 additions & 0 deletions src/Microsoft.ML.Transforms/TensorFlow/Buffer.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,211 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Runtime.InteropServices;
using System.Text;
using size_t = System.UIntPtr;

#pragma warning disable MSML_GeneralName
#pragma warning disable MSML_ParameterLocalVarName

namespace Microsoft.ML.Transforms.TensorFlow
{
/// <summary>
Copy link

@yaeldekel yaeldekel Aug 16, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

[](start = 0, length = 1)

Change the tabs to spaces. (in the other files from TFSharp as well). #Resolved

/// This attribute marks a managed method as a callback target that will be
/// invoked from unmanaged (native) code.
/// </summary>
/// <remarks>
/// <code>
/// [TensorFlow.MonoPInvokeCallback (typeof (BufferReleaseFunc))]
/// internal static void MyFreeFunc (IntPtr data, IntPtr length){..}
/// </code>
/// </remarks>
internal sealed class MonoPInvokeCallbackAttribute : Attribute
{
    /// <summary>
    /// Annotates the callback, recording the delegate type whose signature the
    /// decorated method must match.
    /// </summary>
    /// <param name="t">Delegate type describing the callback signature. The value
    /// is not read at runtime; the attribute exists for AOT/static-compilation tooling.</param>
    public MonoPInvokeCallbackAttribute(Type t)
    {
    }
}

// Managed mirror of the native TF_Buffer struct, passed by pointer across the
// TensorFlow C API boundary (see the extern TF_Buffer* declarations in TFBuffer).
// Field order and types must match the native layout exactly — do not reorder.
[StructLayout (LayoutKind.Sequential)]
internal struct LLBuffer
{
    internal IntPtr data;             // pointer to the payload bytes
    internal size_t length;           // payload size in bytes (size_t aliases System.UIntPtr)
    internal IntPtr data_deallocator; // native function pointer invoked to release `data`; IntPtr.Zero when the caller keeps ownership
}

/// <summary>
/// Holds a block of data, suitable to pass, or retrieve from TensorFlow.
/// </summary>
/// <remarks>
/// <para>
/// Use the TFBuffer to pass blobs of data into TensorFlow, or to retrieve blocks
/// of data out of TensorFlow.
/// </para>
/// <para>
/// There are two constructors to wrap existing data, one to wrap blocks that are
/// pointed to by an IntPtr and one that takes a byte array that we want to wrap.
/// </para>
/// <para>
/// The empty constructor can be used to create a new TFBuffer that can be populated
/// by the TensorFlow library and returned to user code.
/// </para>
/// <para>
/// Typically, the data consists of a serialized protocol buffer, but other data
/// may also be held in a buffer.
/// </para>
/// </remarks>
// TODO: the string ctor
// TODO: perhaps we should have an implicit byte [] conversion that just calls ToArray?
internal class TFBuffer : TFDisposable
{
    // extern TF_Buffer * TF_NewBufferFromString (const void *proto, size_t proto_len);
    [DllImport(NativeBinding.TensorFlowLibrary)]
    private static extern unsafe LLBuffer* TF_NewBufferFromString(IntPtr proto, IntPtr proto_len);

    // extern TF_Buffer * TF_NewBuffer ();
    [DllImport(NativeBinding.TensorFlowLibrary)]
    private static extern unsafe LLBuffer* TF_NewBuffer();

    /// <summary>
    /// Wraps an existing native TF_Buffer handle; ownership passes to this instance.
    /// </summary>
    internal TFBuffer(IntPtr handle) : base(handle) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="T:TensorFlow.TFBuffer"/> class.
    /// </summary>
    public unsafe TFBuffer() : base((IntPtr)TF_NewBuffer())
    {
    }

    /// <summary>
    /// Signature of the method that is invoked to release the data.
    /// </summary>
    /// <remarks>
    /// Methods of this signature are invoked with the data pointer and the
    /// length pointer when the TFBuffer no longer needs to hold on to the
    /// data. If you are using this on platforms with static compilation
    /// like iOS, you need to annotate your callback with the MonoPInvokeCallbackAttribute,
    /// like this:
    ///
    /// <code>
    /// [TensorFlow.MonoPInvokeCallback (typeof (BufferReleaseFunc))]
    /// internal static void MyFreeFunc (IntPtr data, IntPtr length){..}
    /// </code>
    /// </remarks>
    public delegate void BufferReleaseFunc(IntPtr data, IntPtr length);

    /// <summary>
    /// Initializes a new instance of the <see cref="T:TensorFlow.TFBuffer"/> by wrapping the unmanaged resource pointed by the buffer.
    /// </summary>
    /// <param name="buffer">Pointer to the data that will be wrapped.</param>
    /// <param name="size">The size of the buffer to wrap.</param>
    /// <param name="release">Optional, if not null, this method will be invoked to release the block.</param>
    /// <remarks>
    /// This constructor wraps the buffer as the data to be held by the <see cref="T:TensorFlow.TFBuffer"/>,
    /// if the release parameter is null, then you must ensure that the data is not released before the TFBuffer
    /// is no longer in use. If the value is not null, the provided method will be invoked to release
    /// the data when the TFBuffer is disposed, or the contents of the buffer replaced.
    /// </remarks>
    public unsafe TFBuffer(IntPtr buffer, long size, BufferReleaseFunc release) : base((IntPtr)TF_NewBuffer())
    {
        LLBuffer* buf = (LLBuffer*)handle;
        buf->data = buffer;
        buf->length = (size_t)size;
        if (release == null)
            buf->data_deallocator = IntPtr.Zero;
        else
            buf->data_deallocator = Marshal.GetFunctionPointerForDelegate(release);
    }

    // Invoked by native code to free buffers we allocated with AllocHGlobal.
    [MonoPInvokeCallback(typeof(BufferReleaseFunc))]
    internal static void FreeBlock(IntPtr data, IntPtr length)
    {
        Marshal.FreeHGlobal(data);
    }

    // Cached native function pointer for FreeBlock. FreeBlockDelegate keeps the
    // delegate instance alive so the GC cannot collect it while native code still
    // holds FreeBufferFunc. Both are assigned once in the static constructor and
    // never change, hence readonly.
    internal static readonly IntPtr FreeBufferFunc;
    internal static readonly BufferReleaseFunc FreeBlockDelegate;

    static TFBuffer()
    {
        FreeBlockDelegate = FreeBlock;
        FreeBufferFunc = Marshal.GetFunctionPointerForDelegate<BufferReleaseFunc>(FreeBlockDelegate);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="T:TensorFlow.TFBuffer"/> by making a copy of the provided byte array.
    /// </summary>
    /// <param name="buffer">Buffer of data that will be wrapped.</param>
    /// <remarks>
    /// This constructor makes a copy of the data into an unmanaged buffer,
    /// so the byte array is not pinned.
    /// </remarks>
    public TFBuffer(byte[] buffer) : this(buffer, 0, buffer?.Length ?? 0) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="T:TensorFlow.TFBuffer"/> by making a copy of the provided byte array.
    /// </summary>
    /// <param name="buffer">Buffer of data that will be wrapped.</param>
    /// <param name="start">Starting offset into the buffer to wrap.</param>
    /// <param name="count">Number of bytes from the buffer to keep.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="buffer"/> is null.</exception>
    /// <exception cref="ArgumentException">If <paramref name="start"/> or <paramref name="count"/> fall outside the buffer.</exception>
    /// <remarks>
    /// This constructor makes a copy of the data into an unmanaged buffer,
    /// so the byte array is not pinned.
    /// </remarks>
    public TFBuffer(byte[] buffer, int start, int count) : this()
    {
        if (buffer == null)
            throw new ArgumentNullException(nameof(buffer));
        // Allow start == buffer.Length when the slice is empty; the previous
        // check (start >= buffer.Length) rejected empty arrays and zero-length
        // tails, making `new TFBuffer(new byte[0])` throw.
        if (start < 0 || start > buffer.Length)
            throw new ArgumentException("start");
        if (count < 0 || count > buffer.Length - start)
            throw new ArgumentException("count");
        unsafe
        {
            LLBuffer* buf = LLBuffer;
            buf->data = Marshal.AllocHGlobal(count);
            Marshal.Copy(buffer, start, buf->data, count);
            buf->length = (size_t)count;
            buf->data_deallocator = FreeBufferFunc;
        }
    }

    // View of the native handle as the TF_Buffer struct it points to.
    internal unsafe LLBuffer* LLBuffer => (LLBuffer*)handle;

    // extern void TF_DeleteBuffer (TF_Buffer *);
    [DllImport(NativeBinding.TensorFlowLibrary)]
    private static extern unsafe void TF_DeleteBuffer(LLBuffer* buffer);

    internal override void NativeDispose(IntPtr handle)
    {
        unsafe { TF_DeleteBuffer((LLBuffer*)handle); }
    }

    // extern TF_Buffer TF_GetBuffer (TF_Buffer *buffer);
    [DllImport(NativeBinding.TensorFlowLibrary)]
    private static extern unsafe LLBuffer TF_GetBuffer(LLBuffer* buffer);

    /// <summary>
    /// Returns a byte array representing the data wrapped by this buffer.
    /// </summary>
    /// <returns>The array, or null if this buffer has already been disposed.</returns>
    public byte[] ToArray()
    {
        if (handle == IntPtr.Zero)
            return null;

        unsafe
        {
            var lb = (LLBuffer*)handle;

            var result = new byte[(int)lb->length];
            Marshal.Copy(lb->data, result, 0, (int)lb->length);

            return result;
        }
    }
}
}
Loading