Upgrade TF.Net version from 0.10.10 to 0.11.3 (#4205)
* Updated TensorFlow.NET from 0.10.10 to 0.11; changed decode_jpeg to decode_image

* Changed decode_image back to decode_jpeg; made a few API changes for TF.NET 0.11.2 to reproduce memory corruption.

* Fixed null graph error; added a reproducer for the protobuf size error.

* Final changes, with all ML samples running on TF.NET version 0.11.3

* Removed local file diff.out

* Corrected for running all ML.Samples

* Cleanup

* Resolved common issues with the PR

* Removed an extra tab

* Added a check to set the default graph only when the current graph is not the default graph.

* Changed it so that the TF.NET version dependency can be changed directly from Dependencies.props; removed an unnecessary dependency

* Removed an unused variable; reverted ImageClassification.cs

* Added the previously missed package dependency variable for the TensorFlow.NET version

* Created a separate function for the string type so that MakeGetter can be restricted to unmanaged types

* Additional refactoring of MakeGetter for the string type

* Reverted the refactoring, as the Microsoft.ML.Scenarios.TensorFlowScenariosTests.TensorFlowStringTest test confirmed that MakeGetter works for the string type
ashbhandare committed Sep 17, 2019
1 parent a22f30f commit 862ae84
Showing 9 changed files with 93 additions and 85 deletions.
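Most of the C# changes below are mechanical renames to match the TensorFlow.NET 0.11 API surface. As orientation, here is a small sketch of the before/after patterns that recur throughout the diff; the member names are taken from the removed and added lines of this commit, while the class and method names are hypothetical and exist only for illustration.

using Tensorflow;
using static Tensorflow.Binding;              // 0.10.x code imported static Tensorflow.Python instead

internal static class TfNet011MigrationSketch // hypothetical container, for illustration only
{
    // Reads the first float out of a tensor using the 0.11-style members.
    internal static float FirstValue(Tensor tensor, TensorShape shape)
    {
        int rank = shape.ndim;                // 0.10.x: shape.NDim
        int[] dims = shape.dims;              // 0.10.x: shape.Dimensions
        tensor.set_shape(shape);              // 0.10.x: tensor.SetShape(shape)
        return tensor.ToArray<float>()[0];    // 0.10.x: tensor.Data<float>()[0]
    }
}

These substitutions account for most of the single-line changes in DnnRetrainTransform.cs and DnnUtils.cs.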
1 change: 1 addition & 0 deletions build/Dependencies.props
@@ -23,6 +23,7 @@
<SystemSecurityPrincipalWindows>4.5.0</SystemSecurityPrincipalWindows>
<TensorFlowVersion>1.14.0</TensorFlowVersion>
<TensorFlowMajorVersion>1</TensorFlowMajorVersion>
<TensorflowDotNETVersion>0.11.3</TensorflowDotNETVersion>
</PropertyGroup>

<!-- Code Analyzer Dependencies -->
2 changes: 1 addition & 1 deletion pkg/Microsoft.ML.Dnn/Microsoft.ML.Dnn.nupkgproj
@@ -9,7 +9,7 @@
<PackageReference Include="SciSharp.TensorFlow.Redist" Version="1.14.0" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="$(SystemIOFileSystemAccessControl)" />
<PackageReference Include="System.Security.Principal.Windows" Version="$(SystemSecurityPrincipalWindows)" />
<PackageReference Include="TensorFlow.NET" Version="0.10.10" />
<PackageReference Include="TensorFlow.NET" Version="$(TensorflowDotNETVersion)" />
<ProjectReference Include="../Microsoft.ML/Microsoft.ML.nupkgproj" />
</ItemGroup>

67 changes: 36 additions & 31 deletions src/Microsoft.ML.Dnn/DnnRetrainTransform.cs
@@ -18,6 +18,7 @@
using NumSharp;
using Tensorflow;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
using static Tensorflow.Binding;

[assembly: LoadableClass(DnnRetrainTransformer.Summary, typeof(IDataTransform), typeof(DnnRetrainTransformer),
typeof(DnnRetrainEstimator.Options), typeof(SignatureDataTransform), DnnRetrainTransformer.UserName, DnnRetrainTransformer.ShortName)]
@@ -235,7 +236,8 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)
inputTensor.InputType(index);
var tfInputShape = ((Tensor)inputTensor).TensorShape;

if (isInputVector && (tfInputShape == null || (tfInputShape.NDim == 0)))
var numInputDims = tfInputShape != null ? tfInputShape.ndim : -1;
if (isInputVector && (tfInputShape == null || (numInputDims == 0)))
{
var vecType = (VectorDataViewType)type;
var colTypeDims = new int[vecType.Dimensions.Length + 1];
@@ -245,13 +247,14 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)

tfInputShape = new TensorShape(colTypeDims);
}
if (tfInputShape.NDim != -1)
if (numInputDims != -1)
{
var newShape = new int[tfInputShape.NDim];
newShape[0] = tfInputShape[0] == 0 || tfInputShape[0] == -1 ? batchSize : tfInputShape[0];
var newShape = new int[numInputDims];
var dims = tfInputShape.dims;
newShape[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];

for (int j = 1; j < tfInputShape.NDim; j++)
newShape[j] = tfInputShape[j];
for (int j = 1; j < numInputDims; j++)
newShape[j] = dims[j];
tfInputShape = new TensorShape(newShape);
}
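Because removed and added lines are interleaved above, here is the new shape normalization as a standalone sketch. It mirrors the ndim/dims logic added to CheckTrainingParameters; the helper name and the rank guard are assumptions made for the sketch, not part of the commit.

using Tensorflow;

internal static class InputShapeSketch           // hypothetical, mirrors the added lines above
{
    // Replaces an unknown (-1) or zero leading dimension with the batch size.
    internal static TensorShape WithBatchSize(TensorShape shape, int batchSize)
    {
        int rank = shape.ndim;
        if (rank <= 0)
            return shape;                        // unknown or scalar shape: nothing to fix up

        var dims = shape.dims;
        var newShape = new int[rank];
        newShape[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];
        for (int j = 1; j < rank; j++)
            newShape[j] = dims[j];
        return new TensorShape(newShape);
    }
}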

@@ -382,10 +385,8 @@ private void TrainCore(DnnRetrainEstimator.Options options, IDataView input, IDa
runner.AddInput(inputs[i], srcTensorGetters[i].GetBufferedBatchTensor());

Tensor[] tensor = runner.Run();
var buffer = tensor[0].Data();
loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].Data<float>()[0] : 0.0f;
metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].Data<float>()[0] : 0.0f;
var b = tensor.Length > 2 && tensor[2] != IntPtr.Zero ? (float[])tensor[2].Data<float>() : null;
loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].ToArray<float>()[0] : 0.0f;
metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].ToArray<float>()[0] : 0.0f;
return (loss, metric);
}
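The Data&lt;float&gt;() to ToArray&lt;float&gt;() switch in TrainCore is easier to read in isolation. Here is a standalone sketch of the new pattern for pulling the scalar loss and metric out of the fetched tensors; the helper is hypothetical, while the IntPtr.Zero guard and 0.0f fallbacks follow the surrounding code.

using System;
using Tensorflow;

internal static class TrainMetricsSketch         // hypothetical, for illustration only
{
    internal static (float loss, float metric) Read(Tensor[] fetched)
    {
        // 0.11.x copies the tensor buffer out with ToArray<T>(); 0.10.x used Data<T>().
        float loss = fetched.Length > 0 && fetched[0] != IntPtr.Zero ? fetched[0].ToArray<float>()[0] : 0.0f;
        float metric = fetched.Length > 1 && fetched[1] != IntPtr.Zero ? fetched[1].ToArray<float>()[0] : 0.0f;
        return (loss, metric);
    }
}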

@@ -639,7 +640,7 @@ internal static (TF_DataType[] tfOutputTypes, DataViewType[] outputTypes, (Opera
// i.e. the first dimension (if unknown) is assumed to be batch dimension.
// If there are other dimension that are unknown the transformer will return a variable length vector.
// This is the work around in absence of reshape transformer.
int[] dims = shape.NDim > 0 ? shape.Dimensions.Skip(shape[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
int[] dims = shape.ndim > 0 ? shape.dims.Skip(shape.dims[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
for (int j = 0; j < dims.Length; j++)
dims[j] = dims[j] == -1 ? 0 : dims[j];
if (dims == null || dims.Length == 0)
@@ -780,7 +781,7 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
if (type.GetItemType() != expectedType)
throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", _parent._inputs[i], expectedType.ToString(), type.ToString());
var originalShape = _parent._tfInputShapes[i];
var shape = originalShape.Dimensions;
var shape = originalShape.dims;

var colTypeDims = vecType.Dimensions.Select(dim => (int)dim).ToArray();
if (shape == null || (shape.Length == 0))
@@ -811,18 +812,20 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
throw Contracts.Except($"Input shape mismatch: Input '{_parent._inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {typeValueCount}.");

// Fill in the unknown dimensions.
var l = new int[originalShape.NDim];
for (int ishape = 0; ishape < originalShape.NDim; ishape++)
l[ishape] = originalShape[ishape] == -1 ? (int)d : originalShape[ishape];
var originalShapeDims = originalShape.dims;
var originalShapeNdim = originalShape.ndim;
var l = new int[originalShapeNdim];
for (int ishape = 0; ishape < originalShapeNdim; ishape++)
l[ishape] = originalShapeDims[ishape] == -1 ? (int)d : originalShapeDims[ishape];
_fullySpecifiedShapes[i] = new TensorShape(l);
}

if (_parent._addBatchDimensionInput)
{
var l = new int[_fullySpecifiedShapes[i].NDim + 1];
var l = new int[_fullySpecifiedShapes[i].ndim + 1];
l[0] = 1;
for (int ishape = 1; ishape < l.Length; ishape++)
l[ishape] = _fullySpecifiedShapes[i][ishape - 1];
l[ishape] = _fullySpecifiedShapes[i].dims[ishape - 1];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
}
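Consolidating the Mapper changes above: unknown (-1) dimensions are filled with the length inferred from the input column, and a leading batch dimension of 1 is prepended when the transformer asks for it. A sketch under those assumptions; names other than the TensorShape members are hypothetical.

using Tensorflow;

internal static class MapperShapeSketch          // hypothetical, mirrors the added lines above
{
    internal static TensorShape FullySpecify(TensorShape originalShape, int inferredLength, bool addBatchDimension)
    {
        // Fill in the unknown dimensions, as the Mapper constructor now does via dims/ndim.
        var dims = originalShape.dims;
        var filled = new int[originalShape.ndim];
        for (int i = 0; i < filled.Length; i++)
            filled[i] = dims[i] == -1 ? inferredLength : dims[i];

        if (!addBatchDimension)
            return new TensorShape(filled);

        // Prepend a batch dimension of 1.
        var withBatch = new int[filled.Length + 1];
        withBatch[0] = 1;
        for (int i = 1; i < withBatch.Length; i++)
            withBatch[i] = filled[i - 1];
        return new TensorShape(withBatch);
    }
}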
@@ -857,7 +860,7 @@ protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func<int, b
return Utils.MarshalInvoke(MakeGetter<int>, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache);
}

private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache)
private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) where T: unmanaged
{
Host.AssertValue(input);

@@ -868,7 +871,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
dst = tensor.Data<T>()[0];
dst = tensor.ToArray<T>()[0];
};
return valuegetter;
}
@@ -881,7 +884,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
DnnUtils.FetchStringData(tensor, editor.Values);
@@ -896,11 +899,11 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
DnnUtils.FetchData<T>(tensor.Data<T>(), editor.Values);
DnnUtils.FetchData<T>(tensor.ToArray<T>(), editor.Values);
dst = editor.Commit();
};
return valuegetter;
@@ -912,6 +915,8 @@ private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGe
{
if (outputCache.Position != position)
{
if (_parent.Graph.graph_key != tf.get_default_graph().graph_key)
_parent._session.graph.as_default();
Runner runner = new Runner(_parent._session);

// Feed the inputs.
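The two lines added to UpdateCacheIfNeeded avoid re-setting the default graph on every cached batch: the session's graph is only made the default when the current default graph differs, compared via graph_key. In isolation, and with the transformer's fields replaced by parameters, the guard looks like this (a sketch, not the committed method).

using Tensorflow;
using static Tensorflow.Binding;

internal static class DefaultGraphSketch         // hypothetical, for illustration only
{
    // Set the model's graph as the default only when it is not already the default.
    internal static void EnsureDefault(Graph modelGraph, Session session)
    {
        if (modelGraph.graph_key != tf.get_default_graph().graph_key)
            session.graph.as_default();
    }
}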
@@ -972,12 +977,12 @@ public TensorValueGetter(DataViewRow input, int colIndex, TensorShape tfShape, b
_tfShape = tfShape;
long size = 0;
_position = 0;
if (tfShape.Dimensions.Length != 0)
if (tfShape.dims.Length != 0)
{
size = 1;
foreach (var dim in tfShape.Dimensions)
foreach (var dim in tfShape.dims)
size *= dim;
_dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
_dims = _tfShape.dims.Select(x => (long)x).ToArray();
}
if (keyType)
_bufferedDataLong = new long[size];
@@ -993,13 +998,13 @@ public Tensor GetTensor()
if (_keyType)
{
var tensor = new Tensor(new[] { Convert.ToInt64(scalar) - 1 });
tensor.SetShape(_tfShape);
tensor.set_shape(_tfShape);
return tensor;
}
else
{
var tensor = new Tensor(new[] { scalar });
tensor.SetShape(_tfShape);
tensor.set_shape(_tfShape);
return tensor;
}
}
@@ -1095,16 +1100,16 @@ public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape

long size = 0;
_position = 0;
if (tfShape.Dimensions.Length != 0)
if (tfShape.dims.Length != 0)
{
size = 1;
foreach (var dim in tfShape.Dimensions)
foreach (var dim in tfShape.dims)
size *= dim;
}
_bufferedData = new T[size];
_bufferedDataSize = size;
if (_tfShape.Dimensions != null)
_dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
if (_tfShape.dims != null)
_dims = _tfShape.dims.Select(x => (long)x).ToArray();
}

public Tensor GetTensor()
11 changes: 5 additions & 6 deletions src/Microsoft.ML.Dnn/DnnUtils.cs
@@ -11,7 +11,7 @@
using Microsoft.ML.Data;
using Microsoft.ML.Runtime;
using Tensorflow;
using static Tensorflow.Python;
using static Tensorflow.Binding;

namespace Microsoft.ML.Transforms.Dnn
{
@@ -92,11 +92,10 @@ internal static Session LoadTFSession(IExceptionContext ectx, byte[] modelBytes,

internal static Graph LoadMetaGraph(string path)
{
return tf_with(tf.Graph().as_default(), graph =>
{
tf.train.import_meta_graph(path);
return graph;
});
var graph = new Graph();
graph = graph.as_default();
tf.train.import_meta_graph(path);
return graph;
}

internal static Session LoadTFSessionByModelFilePath(IExceptionContext ectx, string modelFile, bool metaGraph = false)
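LoadMetaGraph above now creates the graph, makes it the default, and imports the meta graph directly instead of wrapping the import in tf_with. A usage-style sketch of that pattern; the wrapper class is hypothetical, while the TF.NET calls are the ones used in the new body.

using Tensorflow;
using static Tensorflow.Binding;

internal static class MetaGraphSketch            // hypothetical, for illustration only
{
    internal static Graph Load(string metaGraphPath)
    {
        var graph = new Graph();
        graph = graph.as_default();              // as_default() returns the graph itself in 0.11.x, as used above
        tf.train.import_meta_graph(metaGraphPath);  // imports into the current default graph
        return graph;
    }
}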