Tensor extensions #4260

Merged 21 commits on Oct 2, 2019
Changes from 17 commits
16 changes: 10 additions & 6 deletions src/Microsoft.ML.Dnn/DnnRetrainTransform.cs
@@ -379,14 +379,18 @@ private void TrainCore(DnnRetrainEstimator.Options options, IDataView input, IDa
            ITensorValueGetter[] srcTensorGetters,
            Runner runner)
        {
-           float loss = 0;
-           float metric = 0;
+           float loss = 0.0f;
+           float metric = 0.0f;
            for (int i = 0; i < inputs.Length; i++)
                runner.AddInput(inputs[i], srcTensorGetters[i].GetBufferedBatchTensor());

            Tensor[] tensor = runner.Run();
-           loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].ToArray<float>()[0] : 0.0f;
-           metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].ToArray<float>()[0] : 0.0f;
+           if (tensor.Length > 0 && tensor[0] != IntPtr.Zero)
+               tensor[0].ToScalar<float>(ref loss);
+
+           if (tensor.Length > 1 && tensor[1] != IntPtr.Zero)
+               tensor[1].ToScalar<float>(ref metric);

            return (loss, metric);
        }
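The hunk above replaces a pattern that allocated a one-element managed array just to read a single value (ToArray<float>()[0]) with ToScalar<float>(ref ...), which reads the scalar straight from the tensor's unmanaged buffer. A minimal, self-contained sketch of the difference, using Marshal-allocated memory as a stand-in for a real TensorFlow tensor buffer (compile with /unsafe):

// Stand-in for a native tensor holding one float: write it to unmanaged
// memory, then read it back both ways.
using System;
using System.Runtime.InteropServices;

static class ScalarReadSketch
{
    static unsafe void Main()
    {
        IntPtr buffer = Marshal.AllocHGlobal(sizeof(float));
        try
        {
            Marshal.Copy(new[] { 0.42f }, 0, buffer, 1);

            // Old pattern: materialize a managed array, then take element 0.
            float[] asArray = new float[1];
            Marshal.Copy(buffer, asArray, 0, 1);
            float viaArray = asArray[0];

            // New pattern: dereference the buffer in place, as ToScalar<T> does.
            float viaScalar = *(float*)buffer;

            Console.WriteLine($"{viaArray} == {viaScalar}");
        }
        finally
        {
            Marshal.FreeHGlobal(buffer);
        }
    }
}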

@@ -871,7 +875,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
                    UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);

                    var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
-                   dst = tensor.ToArray<T>()[0];
+                   tensor.ToScalar<T>(ref dst);
                };
                return valuegetter;
            }
@@ -903,7 +907,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[

                    var editor = VBufferEditor.Create(ref dst, (int)tensorSize);

-                   DnnUtils.FetchData<T>(tensor.ToArray<T>(), editor.Values);
+                   tensor.CopyTo<T>(editor.Values);
                    dst = editor.Commit();
                };
                return valuegetter;
28 changes: 19 additions & 9 deletions src/Microsoft.ML.Dnn/ImageClassificationTransform.cs
@@ -235,15 +235,18 @@ private void CacheFeaturizedImagesToDisk(IDataView input, string labelColumnName
            ImageClassificationMetrics metrics = new ImageClassificationMetrics();
            metrics.Bottleneck = new BottleneckMetrics();
            metrics.Bottleneck.DatasetUsed = dataset;
+           float[] imageArray = null;
            while (cursor.MoveNext())
            {
                labelGetter(ref label);
                imagePathGetter(ref imagePath);
                var imagePathStr = imagePath.ToString();
                var imageTensor = imageProcessor.ProcessImage(imagePathStr);
                runner.AddInput(imageTensor, 0);
-               var featurizedImage = runner.Run()[0]; // Reuse memory?
-               writer.WriteLine(label - 1 + "," + string.Join(",", featurizedImage.ToArray<float>()));
+               var featurizedImage = runner.Run()[0]; // Reuse memory
+               featurizedImage.ToArray<float>(ref imageArray);
+               Host.Assert((int)featurizedImage.size == imageArray.Length);
+               writer.WriteLine(label - 1 + "," + string.Join(",", imageArray));
                featurizedImage.Dispose();
                imageTensor.Dispose();
                metrics.Bottleneck.Index++;
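Declaring imageArray once, outside the cursor loop, is what lets ToArray<float>(ref imageArray) reuse one managed buffer for every image instead of allocating a fresh array per featurized row. A small sketch of that reuse pattern; EnsureSize here is a simplified, hypothetical stand-in for the internal Utils.EnsureSize that the new ToArray<T> extension relies on:

using System;

static class BufferReuseSketch
{
    // Grow-only resize: allocate only when the current buffer is too small.
    static void EnsureSize(ref float[] array, int size)
    {
        if (array == null || array.Length < size)
            array = new float[size];
    }

    static void Main()
    {
        float[] buffer = null;                  // declared once, outside the loop
        for (int image = 0; image < 3; image++)
        {
            EnsureSize(ref buffer, 4);          // allocates on the first pass only
            for (int i = 0; i < 4; i++)
                buffer[i] = image + i * 0.1f;   // pretend featurization output
            Console.WriteLine(string.Join(",", buffer));
        }
    }
}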
@@ -338,6 +341,8 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,

            ImageClassificationMetrics metrics = new ImageClassificationMetrics();
            metrics.Train = new TrainMetrics();
+           float accuracy = 0;
+           float crossentropy = 0;
            for (int epoch = 0; epoch < epochs; epoch += 1)
            {
                metrics.Train.Accuracy = 0;
@@ -378,8 +383,10 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,
                    .AddInput(new Tensor(labelBatchPtr, labelTensorShape, TF_DataType.TF_INT64, labelBatchSizeInBytes), 1)
                    .Run();

-               metrics.Train.Accuracy += outputTensors[0].ToArray<float>()[0];
-               metrics.Train.CrossEntropy += outputTensors[1].ToArray<float>()[0];
+               outputTensors[0].ToScalar<float>(ref accuracy);
+               outputTensors[1].ToScalar<float>(ref crossentropy);
+               metrics.Train.Accuracy += accuracy;
+               metrics.Train.CrossEntropy += crossentropy;

                outputTensors[0].Dispose();
                outputTensors[1].Dispose();
@@ -429,7 +436,8 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,
                    .AddInput(new Tensor(labelBatchPtr, labelTensorShape, TF_DataType.TF_INT64, labelBatchSizeInBytes), 1)
                    .Run();

-               metrics.Train.Accuracy += outputTensors[0].ToArray<float>()[0];
+               outputTensors[0].ToScalar<float>(ref accuracy);
+               metrics.Train.Accuracy += accuracy;
                metrics.Train.BatchProcessedCount += 1;
                batchIndex = 0;

@@ -799,8 +807,10 @@ private class OutputCache
            private ReadOnlyMemory<char> _imagePath;
            private Runner _runner;
            private ImageProcessor _imageProcessor;
-           public UInt32 PredictedLabel { get; set; }
-           public float[] ClassProbabilities { get; set; }
+           private long _predictedLabel;
+           public UInt32 PredictedLabel => (uint)_predictedLabel;
+           private float[] _classProbability;
+           public float[] ClassProbabilities => _classProbability;
            private DataViewRow _inputRow;

            public OutputCache(DataViewRow input, ImageClassificationTransformer transformer)
@@ -826,8 +836,8 @@ public void UpdateCacheIfNeeded()
                _imagePathGetter(ref _imagePath);
                var processedTensor = _imageProcessor.ProcessImage(_imagePath.ToString());
                var outputTensor = _runner.AddInput(processedTensor, 0).Run();
-               ClassProbabilities = outputTensor[0].ToArray<float>();
-               PredictedLabel = (UInt32)outputTensor[1].ToArray<long>()[0];
+               outputTensor[0].ToArray<float>(ref _classProbability);
+               outputTensor[1].ToScalar<long>(ref _predictedLabel);
                outputTensor[0].Dispose();
                outputTensor[1].Dispose();
                processedTensor.Dispose();
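The switch from settable auto-properties to read-only properties over private fields is what makes the by-ref extension calls possible: C# cannot pass a property by ref, so ToScalar<long>(ref _predictedLabel) and ToArray<float>(ref _classProbability) need real fields to write into. A minimal sketch of that shape (PredictionCache and WriteScalar are illustrative stand-ins, not the PR's types):

using System;

class PredictionCache
{
    private long _predictedLabel;
    public uint PredictedLabel => (uint)_predictedLabel;

    // Stand-in for an API like ToScalar<long>(ref long dst).
    static void WriteScalar(ref long dst) => dst = 7;

    static void Main()
    {
        var cache = new PredictionCache();
        // WriteScalar(ref cache.PredictedLabel);  // would not compile: a property is not a variable
        WriteScalar(ref cache._predictedLabel);    // a field can be passed by ref
        Console.WriteLine(cache.PredictedLabel);   // prints 7
    }
}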
51 changes: 51 additions & 0 deletions src/Microsoft.ML.Dnn/TensorTypeExtensions.cs
@@ -0,0 +1,51 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using Microsoft.ML.Internal.Utilities;
using NumSharp.Backends;
using NumSharp.Backends.Unmanaged;
using NumSharp.Utilities;
using Tensorflow;

namespace Microsoft.ML.Transforms
{
    [BestFriend]
    internal static class TensorTypeExtensions
    {
        public static void ToScalar<T>(this Tensor tensor, ref T dst) where T : unmanaged
        {
            if (typeof(T).as_dtype() != tensor.dtype)
                throw new NotSupportedException();

            unsafe
            {
                dst = *(T*)tensor.buffer;
            }

        }

        public static void CopyTo<T>(this Tensor tensor, Span<T> values) where T: unmanaged
        {
            if (typeof(T).as_dtype() != tensor.dtype)
                throw new NotSupportedException();

            unsafe
            {
                var len = (long)tensor.size;
                var src = (T*)tensor.buffer;
                var span = new Span<T>(src, checked((int)len));
                span.CopyTo(values);
            }
        }

        public static void ToArray<T>(this Tensor tensor, ref T[] array) where T : unmanaged
        {
            Utils.EnsureSize(ref array, (int)tensor.size, (int)tensor.size, false);
            var span = new Span<T>(array);

            CopyTo(tensor, span);
        }
    }
}
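All three extensions follow the same idea: check that T matches the tensor's dtype, then either dereference the unmanaged buffer directly (ToScalar) or wrap it in a Span<T> and copy into caller-owned storage (CopyTo, ToArray). A self-contained sketch of the span-copy half, with Marshal.AllocHGlobal standing in for the native tensor buffer (compile with /unsafe):

using System;
using System.Runtime.InteropServices;

static class TensorCopySketch
{
    static unsafe void Main()
    {
        const int len = 4;
        IntPtr buffer = Marshal.AllocHGlobal(len * sizeof(float));
        try
        {
            // Fill the "tensor" buffer through a span over unmanaged memory.
            var src = new Span<float>((void*)buffer, len);
            for (int i = 0; i < len; i++)
                src[i] = i * 0.5f;

            // CopyTo-style: copy into a destination the caller owns.
            float[] dst = new float[len];
            src.CopyTo(dst);

            Console.WriteLine(string.Join(",", dst)); // prints the four copied values
        }
        finally
        {
            Marshal.FreeHGlobal(buffer);
        }
    }
}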
6 changes: 2 additions & 4 deletions src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
@@ -588,15 +588,13 @@ protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func<int, b
        private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) where T : unmanaged
        {
            Host.AssertValue(input);
-
            if (_parent.OutputTypes[iinfo].IsStandardScalar())
            {
                ValueGetter<T> valuegetter = (ref T dst) =>
                {
                    UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
-
                    var tensor = outputCache.Outputs[_parent.Outputs[iinfo]];
-                   dst = tensor.ToArray<T>()[0];
+                   tensor.ToScalar<T>(ref dst);
                };
                return valuegetter;
            }
@@ -628,7 +626,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[

                    var editor = VBufferEditor.Create(ref dst, (int)tensorSize);

-                   DnnUtils.FetchData<T>(tensor.ToArray<T>(), editor.Values);
+                   tensor.CopyTo<T>(editor.Values);
                    dst = editor.Commit();
                };
                return valuegetter;
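For vector-valued outputs, both transforms now copy straight into the span exposed by VBufferEditor instead of routing through DnnUtils.FetchData and an intermediate ToArray<T>() allocation. A hedged sketch of that shape using the public Microsoft.ML.Data.VBufferEditor API; FillFromNativeBuffer is a hypothetical stand-in for tensor.CopyTo<float>(editor.Values):

using System;
using Microsoft.ML.Data;

static class VBufferFillSketch
{
    // Pretend these values come from a tensor's unmanaged buffer.
    static void FillFromNativeBuffer(Span<float> destination)
    {
        for (int i = 0; i < destination.Length; i++)
            destination[i] = i;
    }

    static void Main()
    {
        VBuffer<float> dst = default;
        var editor = VBufferEditor.Create(ref dst, 3);   // same call as in the diff
        FillFromNativeBuffer(editor.Values);             // tensor.CopyTo<T>(editor.Values) in the PR
        dst = editor.Commit();
        Console.WriteLine(string.Join(",", dst.DenseValues()));  // 0,1,2
    }
}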