Upgrade TF.Net version from 0.10.10 to 0.11.3 #4205
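For orientation, here is a minimal sketch (mine, not part of the PR) of the TF.NET surface changes the diff below tracks. It uses only member names visible in the changed hunks and assumes the 0.11.x API they imply:

```csharp
// Old (0.10.10) -> new (0.11.3) names, as reflected in the hunks below:
//   using static Tensorflow.Python;  ->  using static Tensorflow.Binding;
//   TensorShape.NDim / .Dimensions   ->  TensorShape.ndim / .dims
//   Tensor.Data<T>()                 ->  Tensor.ToArray<T>()
//   Tensor.SetShape(shape)           ->  Tensor.set_shape(shape)
using Tensorflow;

static class ShapeSketch
{
    // Mirrors the CheckTrainingParameters change: copy the known dimensions,
    // substituting the batch size for an unknown (0 or -1) leading dimension.
    internal static int[] WithBatchSize(TensorShape shape, int batchSize)
    {
        var dims = shape.dims;
        var result = new int[shape.ndim];
        result[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];
        for (int j = 1; j < shape.ndim; j++)
            result[j] = dims[j];
        return result;
    }

    static void Main()
    {
        var shape = new TensorShape(new[] { -1, 28, 28 });   // leading dimension unknown
        var dims = WithBatchSize(shape, batchSize: 64);
        System.Console.WriteLine(string.Join("x", dims));    // 64x28x28
    }
}
```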
@@ -18,6 +18,7 @@
using NumSharp;
using Tensorflow;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
+using static Tensorflow.Binding;

[assembly: LoadableClass(DnnRetrainTransformer.Summary, typeof(IDataTransform), typeof(DnnRetrainTransformer),
typeof(DnnRetrainEstimator.Options), typeof(SignatureDataTransform), DnnRetrainTransformer.UserName, DnnRetrainTransformer.ShortName)]
@@ -235,7 +236,8 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)
inputTensor.InputType(index);
var tfInputShape = ((Tensor)inputTensor).TensorShape;

-if (isInputVector && (tfInputShape == null || (tfInputShape.NDim == 0)))
+var numInputDims = tfInputShape != null ? tfInputShape.ndim : -1;
+if (isInputVector && (tfInputShape == null || (numInputDims == 0)))
{
var vecType = (VectorDataViewType)type;
var colTypeDims = new int[vecType.Dimensions.Length + 1];
@@ -245,13 +247,14 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)

tfInputShape = new TensorShape(colTypeDims);
}
-if (tfInputShape.NDim != -1)
+if (numInputDims != -1)
{
-var newShape = new int[tfInputShape.NDim];
-newShape[0] = tfInputShape[0] == 0 || tfInputShape[0] == -1 ? batchSize : tfInputShape[0];
+var newShape = new int[numInputDims];
+var dims = tfInputShape.dims;
+newShape[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];

-for (int j = 1; j < tfInputShape.NDim; j++)
-newShape[j] = tfInputShape[j];
+for (int j = 1; j < numInputDims; j++)
+newShape[j] = dims[j];
tfInputShape = new TensorShape(newShape);
}
@@ -382,10 +385,8 @@ private void TrainCore(DnnRetrainEstimator.Options options, IDataView input, IDa
runner.AddInput(inputs[i], srcTensorGetters[i].GetBufferedBatchTensor());

Tensor[] tensor = runner.Run();
-var buffer = tensor[0].Data();
-loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].Data<float>()[0] : 0.0f;
-metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].Data<float>()[0] : 0.0f;
-var b = tensor.Length > 2 && tensor[2] != IntPtr.Zero ? (float[])tensor[2].Data<float>() : null;
+loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].ToArray<float>()[0] : 0.0f;
eerhardt marked this conversation as resolved.
+metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].ToArray<float>()[0] : 0.0f;

return (loss, metric);
}
@@ -639,7 +640,7 @@ internal static (TF_DataType[] tfOutputTypes, DataViewType[] outputTypes, (Opera
// i.e. the first dimension (if unknown) is assumed to be batch dimension.
// If there are other dimension that are unknown the transformer will return a variable length vector.
// This is the work around in absence of reshape transformer.
-int[] dims = shape.NDim > 0 ? shape.Dimensions.Skip(shape[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
+int[] dims = shape.ndim > 0 ? shape.dims.Skip(shape.dims[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
for (int j = 0; j < dims.Length; j++)
dims[j] = dims[j] == -1 ? 0 : dims[j];
if (dims == null || dims.Length == 0)
@@ -780,7 +781,7 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
if (type.GetItemType() != expectedType)
throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", _parent._inputs[i], expectedType.ToString(), type.ToString());
var originalShape = _parent._tfInputShapes[i];
-var shape = originalShape.Dimensions;
+var shape = originalShape.dims;

var colTypeDims = vecType.Dimensions.Select(dim => (int)dim).ToArray();
if (shape == null || (shape.Length == 0))
@@ -811,18 +812,20 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
throw Contracts.Except($"Input shape mismatch: Input '{_parent._inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {typeValueCount}.");

// Fill in the unknown dimensions.
-var l = new int[originalShape.NDim];
-for (int ishape = 0; ishape < originalShape.NDim; ishape++)
-l[ishape] = originalShape[ishape] == -1 ? (int)d : originalShape[ishape];
+var originalShapeDims = originalShape.dims;
+var originalShapeNdim = originalShape.ndim;
+var l = new int[originalShapeNdim];
+for (int ishape = 0; ishape < originalShapeNdim; ishape++)
+l[ishape] = originalShapeDims[ishape] == -1 ? (int)d : originalShapeDims[ishape];
Review comment: Should this cast also be done outside the loop? #Resolved
Reply: This is not an addition in this PR. If required, I can make this change.
_fullySpecifiedShapes[i] = new TensorShape(l);
}

if (_parent._addBatchDimensionInput)
{
-var l = new int[_fullySpecifiedShapes[i].NDim + 1];
+var l = new int[_fullySpecifiedShapes[i].ndim + 1];
l[0] = 1;
for (int ishape = 1; ishape < l.Length; ishape++)
-l[ishape] = _fullySpecifiedShapes[i][ishape - 1];
+l[ishape] = _fullySpecifiedShapes[i].dims[ishape - 1];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
}
@@ -857,7 +860,7 @@ protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func<int, b
return Utils.MarshalInvoke(MakeGetter<int>, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache);
}

-private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache)
+private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) where T : unmanaged
Review comment: Can this ever be used with a string?
Reply: I have made a separate function for when T is string to avoid this failure.
{
Host.AssertValue(input);
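The exchange above concerns the new where T : unmanaged constraint. As a self-contained illustration (names below are mine, not from the PR) of why that constraint rules out string and why the string case needs its own code path:

```csharp
using System;
using System.Runtime.InteropServices;

static class UnmanagedConstraintDemo
{
    // Only unmanaged (blittable) element types can be reinterpreted from a raw buffer.
    static T[] FromRawBuffer<T>(byte[] raw) where T : unmanaged
        => MemoryMarshal.Cast<byte, T>(raw).ToArray();

    static void Main()
    {
        float[] floats = FromRawBuffer<float>(BitConverter.GetBytes(1.5f));
        Console.WriteLine(floats[0]);   // 1.5

        // FromRawBuffer<string>(...) would not even compile: string is a managed
        // reference type, which is why the transformer gets a separate getter
        // for string columns instead of routing them through the generic one.
    }
}
```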
@@ -868,7 +871,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);

var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
-dst = tensor.Data<T>()[0];
+dst = tensor.ToArray<T>()[0];
};
return valuegetter;
}
@@ -881,7 +884,7 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);

var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
-var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);

var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
DnnUtils.FetchStringData(tensor, editor.Values);
@@ -896,11 +899,11 @@ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);

var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
-var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);

var editor = VBufferEditor.Create(ref dst, (int)tensorSize);

-DnnUtils.FetchData<T>(tensor.Data<T>(), editor.Values);
+DnnUtils.FetchData<T>(tensor.ToArray<T>(), editor.Values);
dst = editor.Commit();
};
return valuegetter;
@@ -912,6 +915,8 @@ private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGe
{
if (outputCache.Position != position)
{
+if (_parent.Graph.graph_key != tf.get_default_graph().graph_key)
+_parent._session.graph.as_default();
Review comment: What do these lines do?
Reply: If the current graph is not the tf default graph, this line sets it as the default graph.
Runner runner = new Runner(_parent._session);

// Feed the inputs.
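As the reply above explains, the two added lines make the transformer's graph the ambient default before the session is run. A minimal sketch of that mechanism, using only the members that appear in this diff (graph_key, as_default(), tf.get_default_graph()) and assuming the TF.NET 0.11.x behavior the reply describes:

```csharp
using Tensorflow;
using static Tensorflow.Binding;

static class DefaultGraphSketch
{
    static void Main()
    {
        var graph = new Graph();

        // A freshly created graph is not necessarily the ambient default.
        bool wasDefault = graph.graph_key == tf.get_default_graph().graph_key;

        graph = graph.as_default();   // make it the default, as the added lines do for the session graph

        bool isDefault = graph.graph_key == tf.get_default_graph().graph_key;
        System.Console.WriteLine($"{wasDefault} -> {isDefault}");   // expected: False -> True
    }
}
```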
@@ -972,12 +977,12 @@ public TensorValueGetter(DataViewRow input, int colIndex, TensorShape tfShape, b
_tfShape = tfShape;
long size = 0;
_position = 0;
-if (tfShape.Dimensions.Length != 0)
+if (tfShape.dims.Length != 0)
{
size = 1;
-foreach (var dim in tfShape.Dimensions)
+foreach (var dim in tfShape.dims)
size *= dim;
-_dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
+_dims = _tfShape.dims.Select(x => (long)x).ToArray();
}
if (keyType)
_bufferedDataLong = new long[size];
@@ -993,13 +998,13 @@ public Tensor GetTensor()
if (_keyType)
{
var tensor = new Tensor(new[] { Convert.ToInt64(scalar) - 1 });
-tensor.SetShape(_tfShape);
+tensor.set_shape(_tfShape);
return tensor;
}
else
{
var tensor = new Tensor(new[] { scalar });
-tensor.SetShape(_tfShape);
+tensor.set_shape(_tfShape);
return tensor;
}
}
@@ -1095,16 +1100,16 @@ public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape

long size = 0;
_position = 0;
-if (tfShape.Dimensions.Length != 0)
+if (tfShape.dims.Length != 0)
{
size = 1;
-foreach (var dim in tfShape.Dimensions)
+foreach (var dim in tfShape.dims)
size *= dim;
}
_bufferedData = new T[size];
_bufferedDataSize = size;
-if (_tfShape.Dimensions != null)
-_dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
+if (_tfShape.dims != null)
+_dims = _tfShape.dims.Select(x => (long)x).ToArray();
}

public Tensor GetTensor()
@@ -11,7 +11,7 @@
using Microsoft.ML.Data;
using Microsoft.ML.Runtime;
using Tensorflow;
-using static Tensorflow.Python;
+using static Tensorflow.Binding;

namespace Microsoft.ML.Transforms.Dnn
{
@@ -92,11 +92,10 @@ internal static Session LoadTFSession(IExceptionContext ectx, byte[] modelBytes,

internal static Graph LoadMetaGraph(string path)
{
-return tf_with(tf.Graph().as_default(), graph =>
-{
-tf.train.import_meta_graph(path);
-return graph;
-});
+var graph = new Graph();
+graph = graph.as_default();
+tf.train.import_meta_graph(path);
Review comment: It seems like there is no connection between 'graph' and 'tf.train.import_meta_graph(path)'.
Reply: This creates a new graph, and as_default() sets it as the tf default graph (https://github.com/SciSharp/TensorFlow.NET/blob/41432600c8263cf972b13930c31479ffb412fa65/src/TensorFlowNET.Core/Graphs/Graph.cs#L134). The 'graph' is where the meta graph is loaded, and it is what gets returned.
+return graph;
Review comment: Remove the new lines. #Resolved
}

internal static Session LoadTFSessionByModelFilePath(IExceptionContext ectx, string modelFile, bool metaGraph = false)
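Following up on the LoadMetaGraph discussion above: tf.train.import_meta_graph populates whatever graph is currently the default, so calling as_default() on the freshly created graph first is what ties the two statements together. A hedged usage sketch of the rewritten pattern (the checkpoint path is hypothetical, and only members visible in this diff are used):

```csharp
using Tensorflow;
using static Tensorflow.Binding;

static class LoadMetaGraphSketch
{
    static Graph Load(string path)
    {
        // Same shape as the new LoadMetaGraph body in this hunk.
        var graph = new Graph();
        graph = graph.as_default();          // subsequent TF calls now target 'graph'
        tf.train.import_meta_graph(path);    // imports nodes into the default graph, i.e. 'graph'
        return graph;
    }

    static void Main()
    {
        var graph = Load("saved_model/model.meta");   // hypothetical path
        // The returned graph is also the current default graph afterwards.
        System.Console.WriteLine(graph.graph_key == tf.get_default_graph().graph_key);
    }
}
```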
Review comment: This should be ordered alphabetically. #Resolved
Reply: VS shows a warning if I reorder. I am guessing the 'using static' directives should appear at the end, and they are already ordered alphabetically.
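For reference, the ordering the reply describes (plain using directives first, then the using static directives, each group alphabetical) is what both files in this diff end up with; the directives below are combined from the two files purely for illustration:

```csharp
using Microsoft.ML.Data;
using Microsoft.ML.Runtime;
using NumSharp;
using Tensorflow;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
using static Tensorflow.Binding;
```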