diff --git a/build/Dependencies.props b/build/Dependencies.props
index e1338a54e8..bd66c41aad 100644
--- a/build/Dependencies.props
+++ b/build/Dependencies.props
@@ -23,6 +23,7 @@
4.5.0
1.14.0
1
+ 0.11.3
diff --git a/pkg/Microsoft.ML.Dnn/Microsoft.ML.Dnn.nupkgproj b/pkg/Microsoft.ML.Dnn/Microsoft.ML.Dnn.nupkgproj
index b439bbf9ad..696b9752cf 100644
--- a/pkg/Microsoft.ML.Dnn/Microsoft.ML.Dnn.nupkgproj
+++ b/pkg/Microsoft.ML.Dnn/Microsoft.ML.Dnn.nupkgproj
@@ -9,7 +9,7 @@
-
+
diff --git a/src/Microsoft.ML.Dnn/DnnRetrainTransform.cs b/src/Microsoft.ML.Dnn/DnnRetrainTransform.cs
index 99cbd1afa5..2c01bbda72 100644
--- a/src/Microsoft.ML.Dnn/DnnRetrainTransform.cs
+++ b/src/Microsoft.ML.Dnn/DnnRetrainTransform.cs
@@ -18,6 +18,7 @@
using NumSharp;
using Tensorflow;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
+using static Tensorflow.Binding;
[assembly: LoadableClass(DnnRetrainTransformer.Summary, typeof(IDataTransform), typeof(DnnRetrainTransformer),
typeof(DnnRetrainEstimator.Options), typeof(SignatureDataTransform), DnnRetrainTransformer.UserName, DnnRetrainTransformer.ShortName)]
@@ -235,7 +236,8 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)
inputTensor.InputType(index);
var tfInputShape = ((Tensor)inputTensor).TensorShape;
- if (isInputVector && (tfInputShape == null || (tfInputShape.NDim == 0)))
+ var numInputDims = tfInputShape != null ? tfInputShape.ndim : -1;
+ if (isInputVector && (tfInputShape == null || (numInputDims == 0)))
{
var vecType = (VectorDataViewType)type;
var colTypeDims = new int[vecType.Dimensions.Length + 1];
@@ -245,13 +247,14 @@ private void CheckTrainingParameters(DnnRetrainEstimator.Options options)
tfInputShape = new TensorShape(colTypeDims);
}
- if (tfInputShape.NDim != -1)
+ if (numInputDims != -1)
{
- var newShape = new int[tfInputShape.NDim];
- newShape[0] = tfInputShape[0] == 0 || tfInputShape[0] == -1 ? batchSize : tfInputShape[0];
+ var newShape = new int[numInputDims];
+ var dims = tfInputShape.dims;
+ newShape[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];
- for (int j = 1; j < tfInputShape.NDim; j++)
- newShape[j] = tfInputShape[j];
+ for (int j = 1; j < numInputDims; j++)
+ newShape[j] = dims[j];
tfInputShape = new TensorShape(newShape);
}
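
The hunk above is the core of the TensorShape API migration: `NDim`, `Dimensions`, and the shape indexer from TensorFlow.NET 0.10.x become `ndim` and `dims` in 0.11.x. A minimal standalone sketch of the same fix-up, with `FixBatchDimension` as a hypothetical helper name (not part of the transformer):

```csharp
using Tensorflow; // TensorFlow.NET 0.11.x

internal static class ShapeFixupSketch
{
    // Replace an unknown leading dimension with the batch size, copying the rest via dims.
    public static TensorShape FixBatchDimension(TensorShape tfInputShape, int batchSize)
    {
        var numInputDims = tfInputShape != null ? tfInputShape.ndim : -1;
        if (numInputDims < 1)
            return tfInputShape; // unknown or scalar shape; handled separately in the transformer

        var dims = tfInputShape.dims;
        var newShape = new int[numInputDims];
        newShape[0] = dims[0] == 0 || dims[0] == -1 ? batchSize : dims[0];
        for (int j = 1; j < numInputDims; j++)
            newShape[j] = dims[j];
        return new TensorShape(newShape);
    }
}
```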
@@ -382,10 +385,8 @@ private void TrainCore(DnnRetrainEstimator.Options options, IDataView input, IDa
runner.AddInput(inputs[i], srcTensorGetters[i].GetBufferedBatchTensor());
Tensor[] tensor = runner.Run();
- var buffer = tensor[0].Data();
- loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].Data<float>()[0] : 0.0f;
- metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].Data<float>()[0] : 0.0f;
- var b = tensor.Length > 2 && tensor[2] != IntPtr.Zero ? (float[])tensor[2].Data() : null;
+ loss = tensor.Length > 0 && tensor[0] != IntPtr.Zero ? (float)tensor[0].ToArray<float>()[0] : 0.0f;
+ metric = tensor.Length > 1 && tensor[1] != IntPtr.Zero ? (float)tensor[1].ToArray<float>()[0] : 0.0f;
return (loss, metric);
}
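
Reading run outputs changes in the same way: `Data<float>()` becomes `ToArray<float>()` in 0.11.x. A hedged sketch of the scalar fetch above, with `ReadScalarOrDefault` as an illustrative helper name:

```csharp
using System;
using Tensorflow;

internal static class ScalarFetchSketch
{
    // Returns the first element of the output tensor at `index`, or 0 when the output is missing.
    public static float ReadScalarOrDefault(Tensor[] outputs, int index)
    {
        return outputs.Length > index && outputs[index] != IntPtr.Zero
            ? outputs[index].ToArray<float>()[0]
            : 0.0f;
    }
}
```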
@@ -639,7 +640,7 @@ internal static (TF_DataType[] tfOutputTypes, DataViewType[] outputTypes, (Opera
// i.e. the first dimension (if unknown) is assumed to be the batch dimension.
// If there are other dimensions that are unknown, the transformer will return a variable-length vector.
// This is the workaround in the absence of a reshape transformer.
- int[] dims = shape.NDim > 0 ? shape.Dimensions.Skip(shape[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
+ int[] dims = shape.ndim > 0 ? shape.dims.Skip(shape.dims[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
for (int j = 0; j < dims.Length; j++)
dims[j] = dims[j] == -1 ? 0 : dims[j];
if (dims == null || dims.Length == 0)
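
The same rename applies where the transformer maps a TensorFlow shape to ML.NET vector dimensions: drop a leading unknown (batch) dimension and turn any remaining -1 into 0 so the column becomes a variable-length vector. A sketch with an illustrative helper name; the empty/null fallback that follows in the transformer is folded into the return:

```csharp
using System.Linq;
using Tensorflow;

internal static class OutputDimsSketch
{
    public static int[] ToMlNetDims(TensorShape shape)
    {
        // Skip a leading -1 (batch) dimension, then map remaining unknowns to 0.
        int[] dims = shape.ndim > 0
            ? shape.dims.Skip(shape.dims[0] == -1 ? 1 : 0).ToArray()
            : new[] { 0 };
        for (int j = 0; j < dims.Length; j++)
            dims[j] = dims[j] == -1 ? 0 : dims[j];
        return dims.Length == 0 ? new[] { 0 } : dims;
    }
}
```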
@@ -780,7 +781,7 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
if (type.GetItemType() != expectedType)
throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", _parent._inputs[i], expectedType.ToString(), type.ToString());
var originalShape = _parent._tfInputShapes[i];
- var shape = originalShape.Dimensions;
+ var shape = originalShape.dims;
var colTypeDims = vecType.Dimensions.Select(dim => (int)dim).ToArray();
if (shape == null || (shape.Length == 0))
@@ -811,18 +812,20 @@ public Mapper(DnnRetrainTransformer parent, DataViewSchema inputSchema) :
throw Contracts.Except($"Input shape mismatch: Input '{_parent._inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {typeValueCount}.");
// Fill in the unknown dimensions.
- var l = new int[originalShape.NDim];
- for (int ishape = 0; ishape < originalShape.NDim; ishape++)
- l[ishape] = originalShape[ishape] == -1 ? (int)d : originalShape[ishape];
+ var originalShapeDims = originalShape.dims;
+ var originalShapeNdim = originalShape.ndim;
+ var l = new int[originalShapeNdim];
+ for (int ishape = 0; ishape < originalShapeNdim; ishape++)
+ l[ishape] = originalShapeDims[ishape] == -1 ? (int)d : originalShapeDims[ishape];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
if (_parent._addBatchDimensionInput)
{
- var l = new int[_fullySpecifiedShapes[i].NDim + 1];
+ var l = new int[_fullySpecifiedShapes[i].ndim + 1];
l[0] = 1;
for (int ishape = 1; ishape < l.Length; ishape++)
- l[ishape] = _fullySpecifiedShapes[i][ishape - 1];
+ l[ishape] = _fullySpecifiedShapes[i].dims[ishape - 1];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
}
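
Two shape adjustments happen in the mapper above: unknown (-1) dimensions are filled from the input column's value count, and an optional batch dimension of 1 is prepended. A standalone sketch under those assumptions; the method name and the `d` parameter are illustrative:

```csharp
using Tensorflow;

internal static class InputShapeSketch
{
    public static TensorShape FillUnknownAndAddBatch(TensorShape originalShape, int d, bool addBatchDimension)
    {
        // Fill in the unknown dimensions from the column's value count.
        var originalShapeDims = originalShape.dims;
        var originalShapeNdim = originalShape.ndim;
        var filled = new int[originalShapeNdim];
        for (int ishape = 0; ishape < originalShapeNdim; ishape++)
            filled[ishape] = originalShapeDims[ishape] == -1 ? d : originalShapeDims[ishape];
        var shape = new TensorShape(filled);

        if (!addBatchDimension)
            return shape;

        // Prepend a batch dimension of 1 for models that expect batched input.
        var l = new int[shape.ndim + 1];
        l[0] = 1;
        for (int ishape = 1; ishape < l.Length; ishape++)
            l[ishape] = shape.dims[ishape - 1];
        return new TensorShape(l);
    }
}
```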
@@ -857,7 +860,7 @@ protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache);
}
- private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache)
+ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) where T : unmanaged
{
Host.AssertValue(input);
@@ -868,7 +871,7 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
- dst = tensor.Data<T>()[0];
+ dst = tensor.ToArray<T>()[0];
};
return valuegetter;
}
@@ -881,7 +884,7 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
- var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+ var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
DnnUtils.FetchStringData(tensor, editor.Values);
@@ -896,11 +899,11 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent._outputs[iinfo]];
- var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+ var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
- DnnUtils.FetchData(tensor.Data<T>(), editor.Values);
+ DnnUtils.FetchData(tensor.ToArray<T>(), editor.Values);
dst = editor.Commit();
};
return valuegetter;
@@ -912,6 +915,8 @@ private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGe
{
if (outputCache.Position != position)
{
+ if (_parent.Graph.graph_key != tf.get_default_graph().graph_key)
+ _parent._session.graph.as_default();
Runner runner = new Runner(_parent._session);
// Feed the inputs.
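
The two added lines above guard against the model's graph no longer being the thread's default graph, which 0.11.x requires before building a Runner. A minimal sketch of that guard, with an illustrative helper name:

```csharp
using Tensorflow;
using static Tensorflow.Binding;

internal static class GraphGuardSketch
{
    // Re-enter the session's graph only when some other graph has become the default.
    public static void EnsureDefaultGraph(Session session, Graph modelGraph)
    {
        if (modelGraph.graph_key != tf.get_default_graph().graph_key)
            session.graph.as_default();
    }
}
```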
@@ -972,12 +977,12 @@ public TensorValueGetter(DataViewRow input, int colIndex, TensorShape tfShape, b
_tfShape = tfShape;
long size = 0;
_position = 0;
- if (tfShape.Dimensions.Length != 0)
+ if (tfShape.dims.Length != 0)
{
size = 1;
- foreach (var dim in tfShape.Dimensions)
+ foreach (var dim in tfShape.dims)
size *= dim;
- _dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
+ _dims = _tfShape.dims.Select(x => (long)x).ToArray();
}
if (keyType)
_bufferedDataLong = new long[size];
@@ -993,13 +998,13 @@ public Tensor GetTensor()
if (_keyType)
{
var tensor = new Tensor(new[] { Convert.ToInt64(scalar) - 1 });
- tensor.SetShape(_tfShape);
+ tensor.set_shape(_tfShape);
return tensor;
}
else
{
var tensor = new Tensor(new[] { scalar });
- tensor.SetShape(_tfShape);
+ tensor.set_shape(_tfShape);
return tensor;
}
}
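
`SetShape` becomes the snake_case `set_shape` in 0.11.x. A sketch of the key-typed branch above, assuming a `uint` key value for illustration (the transformer is generic over the column type):

```csharp
using System;
using Tensorflow;

internal static class KeyTensorSketch
{
    public static Tensor MakeKeyTensor(uint scalarKey, TensorShape tfShape)
    {
        // ML.NET key types are 1-based while TensorFlow labels are 0-based, hence the -1.
        var tensor = new Tensor(new[] { Convert.ToInt64(scalarKey) - 1 });
        tensor.set_shape(tfShape);
        return tensor;
    }
}
```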
@@ -1095,16 +1100,16 @@ public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape
long size = 0;
_position = 0;
- if (tfShape.Dimensions.Length != 0)
+ if (tfShape.dims.Length != 0)
{
size = 1;
- foreach (var dim in tfShape.Dimensions)
+ foreach (var dim in tfShape.dims)
size *= dim;
}
_bufferedData = new T[size];
_bufferedDataSize = size;
- if (_tfShape.Dimensions != null)
- _dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
+ if (_tfShape.dims != null)
+ _dims = _tfShape.dims.Select(x => (long)x).ToArray();
}
public Tensor GetTensor()
diff --git a/src/Microsoft.ML.Dnn/DnnUtils.cs b/src/Microsoft.ML.Dnn/DnnUtils.cs
index 623e103997..c46e0b4c74 100644
--- a/src/Microsoft.ML.Dnn/DnnUtils.cs
+++ b/src/Microsoft.ML.Dnn/DnnUtils.cs
@@ -11,7 +11,7 @@
using Microsoft.ML.Data;
using Microsoft.ML.Runtime;
using Tensorflow;
-using static Tensorflow.Python;
+using static Tensorflow.Binding;
namespace Microsoft.ML.Transforms.Dnn
{
@@ -92,11 +92,10 @@ internal static Session LoadTFSession(IExceptionContext ectx, byte[] modelBytes,
internal static Graph LoadMetaGraph(string path)
{
- return tf_with(tf.Graph().as_default(), graph =>
- {
- tf.train.import_meta_graph(path);
- return graph;
- });
+ var graph = new Graph();
+ graph = graph.as_default();
+ tf.train.import_meta_graph(path);
+ return graph;
}
internal static Session LoadTFSessionByModelFilePath(IExceptionContext ectx, string modelFile, bool metaGraph = false)
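
LoadMetaGraph above drops the `tf_with(...)` scope helper (gone along with `Tensorflow.Python`) in favor of an explicit `as_default()` call, and `tf` now comes from `static Tensorflow.Binding`. A self-contained sketch of the same pattern; the class name is illustrative:

```csharp
using Tensorflow;
using static Tensorflow.Binding; // 0.11.x exposes `tf` via Binding instead of Python

internal static class MetaGraphSketch
{
    public static Graph Load(string metaGraphPath)
    {
        var graph = new Graph();
        graph = graph.as_default();                // explicit replacement for the tf_with scope
        tf.train.import_meta_graph(metaGraphPath); // imports into the current default graph
        return graph;
    }
}
```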
diff --git a/src/Microsoft.ML.Dnn/ImageClassificationTransform.cs b/src/Microsoft.ML.Dnn/ImageClassificationTransform.cs
index 4729731972..128b0964aa 100644
--- a/src/Microsoft.ML.Dnn/ImageClassificationTransform.cs
+++ b/src/Microsoft.ML.Dnn/ImageClassificationTransform.cs
@@ -20,7 +20,7 @@
using static Microsoft.ML.Data.TextLoader;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
using static Microsoft.ML.Transforms.ImageClassificationEstimator;
-using static Tensorflow.Python;
+using static Tensorflow.Binding;
using Architecture = Microsoft.ML.Transforms.ImageClassificationEstimator.Architecture;
[assembly: LoadableClass(ImageClassificationTransformer.Summary, typeof(IDataTransform), typeof(ImageClassificationTransformer),
@@ -170,7 +170,7 @@ private void CheckTrainingParameters(ImageClassificationEstimator.Options option
Host.CheckNonWhiteSpace(options.LabelColumn, nameof(options.LabelColumn));
Host.CheckNonWhiteSpace(options.TensorFlowLabel, nameof(options.TensorFlowLabel));
- if (_session.graph.OperationByName(options.TensorFlowLabel) == null)
+ if (_session.graph.OperationByName(_labelTensor.name.Split(':')[0]) == null)
throw Host.ExceptParam(nameof(options.TensorFlowLabel), $"'{options.TensorFlowLabel}' does not exist in the model");
}
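
The lookup above now derives the operation name from the label tensor's own name rather than trusting the user-supplied option. A tensor name carries an output-index suffix ("opName:0"), so the operation name is the part before the colon; a trivial sketch with an illustrative helper name:

```csharp
internal static class TensorNameSketch
{
    // "opName:0" -> "opName"; OperationByName expects the bare operation name.
    public static string ToOperationName(string tensorName) => tensorName.Split(':')[0];
}
```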
@@ -243,7 +243,7 @@ private void CacheFeaturizedImagesToDisk(IDataView input, string labelColumnName
var imageTensor = imageProcessor.ProcessImage(imagePathStr);
runner.AddInput(imageTensor, 0);
var featurizedImage = runner.Run()[0]; // Reuse memory?
- writer.WriteLine(label - 1 + "," + string.Join(",", featurizedImage.Data()));
+ writer.WriteLine(label - 1 + "," + string.Join(",", featurizedImage.ToArray()));
featurizedImage.Dispose();
imageTensor.Dispose();
metrics.Bottleneck.Index++;
@@ -378,8 +378,8 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,
.AddInput(new Tensor(labelBatchPtr, labelTensorShape, TF_DataType.TF_INT64, labelBatchSizeInBytes), 1)
.Run();
- metrics.Train.Accuracy += outputTensors[0].Data()[0];
- metrics.Train.CrossEntropy += outputTensors[1].Data()[0];
+ metrics.Train.Accuracy += outputTensors[0].ToArray()[0];
+ metrics.Train.CrossEntropy += outputTensors[1].ToArray()[0];
outputTensors[0].Dispose();
outputTensors[1].Dispose();
@@ -429,7 +429,7 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,
.AddInput(new Tensor(labelBatchPtr, labelTensorShape, TF_DataType.TF_INT64, labelBatchSizeInBytes), 1)
.Run();
- metrics.Train.Accuracy += outputTensors[0].Data()[0];
+ metrics.Train.Accuracy += outputTensors[0].ToArray()[0];
metrics.Train.BatchProcessedCount += 1;
batchIndex = 0;
@@ -458,17 +458,13 @@ private void TrainAndEvaluateClassificationLayer(string trainBottleneckFilePath,
Tensor evaluationStep = null;
Tensor prediction = null;
Tensor bottleneckTensor = evalGraph.OperationByName(_bottleneckOperationName);
-
- tf_with(evalGraph.as_default(), graph =>
- {
- var (_, _, groundTruthInput, finalTensor) = AddFinalRetrainOps(classCount, options.LabelColumn,
+ evalGraph.as_default();
+ var (_, _, groundTruthInput, finalTensor) = AddFinalRetrainOps(classCount, options.LabelColumn,
options.ScoreColumnName, options.LearningRate, bottleneckTensor, false);
tf.train.Saver().restore(evalSess, _checkpointPath);
(evaluationStep, prediction) = AddEvaluationStep(finalTensor, groundTruthInput);
(_jpegData, _resizedImage) = AddJpegDecoding(299, 299, 3);
- });
-
return (evalSess, _labelTensor, evaluationStep, prediction);
}
@@ -530,14 +526,15 @@ private void VariableSummaries(RefVariable var)
private (Operation, Tensor, Tensor, Tensor) AddFinalRetrainOps(int classCount, string labelColumn,
string scoreColumnName, float learningRate, Tensor bottleneckTensor, bool isTraining)
{
- var (batch_size, bottleneck_tensor_size) = (bottleneckTensor.TensorShape.Dimensions[0], bottleneckTensor.TensorShape.Dimensions[1]);
+ var bottleneckTensorDims = bottleneckTensor.TensorShape.dims;
+ var (batch_size, bottleneck_tensor_size) = (bottleneckTensorDims[0], bottleneckTensorDims[1]);
tf_with(tf.name_scope("input"), scope =>
{
if (isTraining)
{
_bottleneckInput = tf.placeholder_with_default(
bottleneckTensor,
- shape: bottleneckTensor.TensorShape.Dimensions,
+ shape: bottleneckTensorDims,
name: "BottleneckInputPlaceholder");
}
@@ -559,7 +556,8 @@ private void VariableSummaries(RefVariable var)
RefVariable layerBiases = null;
tf_with(tf.name_scope("biases"), delegate
{
- layerBiases = tf.Variable(tf.zeros(classCount), name: "final_biases");
+ TensorShape shape = new TensorShape(classCount);
+ layerBiases = tf.Variable(tf.zeros(shape), name: "final_biases");
VariableSummaries(layerBiases);
});
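
In 0.11.x the zeros initializer above is given an explicit TensorShape instead of a bare class count. A hedged sketch of just that bias initialization; the helper name is illustrative:

```csharp
using Tensorflow;
using static Tensorflow.Binding;

internal static class BiasInitSketch
{
    public static RefVariable CreateFinalBiases(int classCount)
    {
        var shape = new TensorShape(classCount); // 1-D shape of length classCount
        return tf.Variable(tf.zeros(shape), name: "final_biases");
    }
}
```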
@@ -599,11 +597,9 @@ private void AddTransferLearningLayer(string labelColumn,
string scoreColumnName, float learningRate, int classCount)
{
_bottleneckTensor = Graph.OperationByName(_bottleneckOperationName);
- tf_with(Graph.as_default(), delegate
- {
- (_trainStep, _crossEntropy, _labelTensor, _softMaxTensor) =
+ (_trainStep, _crossEntropy, _labelTensor, _softMaxTensor) =
AddFinalRetrainOps(classCount, labelColumn, scoreColumnName, learningRate, _bottleneckTensor, true);
- });
+
}
// Factory method for SignatureLoadDataTransform.
@@ -757,7 +753,7 @@ private protected override void SaveModel(ModelSaveContext ctx)
var buffer = _session.graph.ToGraphDef(status);
ctx.SaveBinaryStream("TFModel", w =>
{
- w.WriteByteArray(buffer.Data);
+ w.WriteByteArray(buffer.MemoryBlock.ToArray());
});
status.Check(true);
}
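
Saving the model changes because the Buffer returned by ToGraphDef no longer exposes `.Data`; the serialized GraphDef bytes are copied out of `MemoryBlock` instead. A sketch, with an illustrative helper name:

```csharp
using Tensorflow;

internal static class GraphDefSketch
{
    public static byte[] GetGraphDefBytes(Session session)
    {
        var status = new Status();
        var buffer = session.graph.ToGraphDef(status);
        status.Check(true);                  // surface any export error
        return buffer.MemoryBlock.ToArray(); // raw serialized GraphDef
    }
}
```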
@@ -830,8 +826,8 @@ public void UpdateCacheIfNeeded()
_imagePathGetter(ref _imagePath);
var processedTensor = _imageProcessor.ProcessImage(_imagePath.ToString());
var outputTensor = _runner.AddInput(processedTensor, 0).Run();
- ClassProbabilities = outputTensor[0].Data();
- PredictedLabel = (UInt32)outputTensor[1].Data()[0];
+ ClassProbabilities = outputTensor[0].ToArray();
+ PredictedLabel = (UInt32)outputTensor[1].ToArray()[0];
outputTensor[0].Dispose();
outputTensor[1].Dispose();
processedTensor.Dispose();
@@ -843,6 +839,7 @@ public void UpdateCacheIfNeeded()
protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func<int, bool> activeOutput, out Action disposer)
{
disposer = null;
+ _parent._session.graph.as_default();
Host.AssertValue(input);
var cache = new OutputCache(input, _parent);
diff --git a/src/Microsoft.ML.Dnn/Microsoft.ML.Dnn.csproj b/src/Microsoft.ML.Dnn/Microsoft.ML.Dnn.csproj
index 30eb3c51ac..b056f074be 100644
--- a/src/Microsoft.ML.Dnn/Microsoft.ML.Dnn.csproj
+++ b/src/Microsoft.ML.Dnn/Microsoft.ML.Dnn.csproj
@@ -16,7 +16,7 @@
-
+
diff --git a/src/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.csproj b/src/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.csproj
index 6b5172192e..d48e454d9b 100644
--- a/src/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.csproj
+++ b/src/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.csproj
@@ -10,7 +10,6 @@
-
diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
index e1b8ef452e..e888f8283c 100644
--- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
+++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
@@ -5,6 +5,7 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
+using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Text;
@@ -20,6 +21,7 @@
using NumSharp;
using Tensorflow;
using static Microsoft.ML.Transforms.Dnn.DnnUtils;
+using static Tensorflow.Binding;
[assembly: LoadableClass(TensorFlowTransformer.Summary, typeof(IDataTransform), typeof(TensorFlowTransformer),
typeof(TensorFlowEstimator.Options), typeof(SignatureDataTransform), TensorFlowTransformer.UserName, TensorFlowTransformer.ShortName)]
@@ -380,7 +382,8 @@ internal static (TF_DataType[] tfOutputTypes, DataViewType[] outputTypes, (Opera
// i.e. the first dimension (if unknown) is assumed to be the batch dimension.
// If there are other dimensions that are unknown, the transformer will return a variable-length vector.
// This is the workaround in the absence of a reshape transformer.
- int[] dims = shape.NDim > 0 ? shape.Dimensions.Skip(shape[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
+ var idims = shape.dims;
+ int[] dims = shape.ndim > 0 ? idims.Skip(idims[0] == -1 ? 1 : 0).ToArray() : new[] { 0 };
for (int j = 0; j < dims.Length; j++)
dims[j] = dims[j] == -1 ? 0 : dims[j];
if (dims == null || dims.Length == 0)
@@ -428,7 +431,7 @@ private protected override void SaveModel(ModelSaveContext ctx)
var buffer = Session.graph.ToGraphDef(status);
ctx.SaveBinaryStream("TFModel", w =>
{
- w.WriteByteArray(buffer.Data);
+ w.WriteByteArray(buffer.MemoryBlock.ToArray());
});
}
@@ -503,7 +506,7 @@ public Mapper(TensorFlowTransformer parent, DataViewSchema inputSchema) :
if (type.GetItemType() != expectedType)
throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", _parent.Inputs[i], expectedType.ToString(), type.ToString());
var originalShape = _parent.TFInputShapes[i];
- var shape = originalShape.Dimensions;
+ var shape = originalShape.dims;
var colTypeDims = vecType.Dimensions.Select(dim => (int)dim).ToArray();
if (shape == null || (shape.Length == 0))
@@ -534,18 +537,20 @@ public Mapper(TensorFlowTransformer parent, DataViewSchema inputSchema) :
throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {typeValueCount}.");
// Fill in the unknown dimensions.
- var l = new int[originalShape.NDim];
- for (int ishape = 0; ishape < originalShape.NDim; ishape++)
- l[ishape] = originalShape[ishape] == -1 ? (int)d : originalShape[ishape];
+ var originalShapeNdim = originalShape.ndim;
+ var originalShapeDims = originalShape.dims;
+ var l = new int[originalShapeNdim];
+ for (int ishape = 0; ishape < originalShapeNdim; ishape++)
+ l[ishape] = originalShapeDims[ishape] == -1 ? (int)d : originalShapeDims[ishape];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
if (_parent._addBatchDimensionInput)
{
- var l = new int[_fullySpecifiedShapes[i].NDim + 1];
+ var l = new int[_fullySpecifiedShapes[i].ndim + 1];
l[0] = 1;
for (int ishape = 1; ishape < l.Length; ishape++)
- l[ishape] = _fullySpecifiedShapes[i][ishape - 1];
+ l[ishape] = _fullySpecifiedShapes[i].dims[ishape - 1];
_fullySpecifiedShapes[i] = new TensorShape(l);
}
}
@@ -580,7 +585,7 @@ protected override Delegate MakeGetter(DataViewRow input, int iinfo, Func, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache);
}
- private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache)
+ private Delegate MakeGetter<T>(DataViewRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) where T : unmanaged
{
Host.AssertValue(input);
@@ -591,7 +596,7 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent.Outputs[iinfo]];
- dst = tensor.Data<T>()[0];
+ dst = tensor.ToArray<T>()[0];
};
return valuegetter;
}
@@ -604,7 +609,7 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent.Outputs[iinfo]];
- var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+ var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
DnnUtils.FetchStringData(tensor, editor.Values);
@@ -619,11 +624,11 @@ private Delegate MakeGetter(DataViewRow input, int iinfo, ITensorValueGetter[
UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache);
var tensor = outputCache.Outputs[_parent.Outputs[iinfo]];
- var tensorSize = tensor.TensorShape.Dimensions.Where(x => x > 0).Aggregate((x, y) => x * y);
+ var tensorSize = tensor.TensorShape.dims.Where(x => x > 0).Aggregate((x, y) => x * y);
var editor = VBufferEditor.Create(ref dst, (int)tensorSize);
- DnnUtils.FetchData(tensor.Data<T>(), editor.Values);
+ DnnUtils.FetchData(tensor.ToArray<T>(), editor.Values);
dst = editor.Commit();
};
return valuegetter;
@@ -635,6 +640,8 @@ private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGe
{
if (outputCache.Position != position)
{
+ if (_parent.Graph.graph_key != tf.get_default_graph().graph_key)
+ _parent.Session.graph.as_default();
Runner runner = new Runner(_parent.Session);
// Feed inputs to the graph.
@@ -718,10 +725,10 @@ public TensorValueGetter(DataViewRow input, int colIndex, TensorShape tfShape)
_tfShape = tfShape;
long size = 0;
_position = 0;
- if (tfShape.Dimensions.Length != 0)
+ if (tfShape.dims.Length != 0)
{
size = 1;
- foreach (var dim in tfShape.Dimensions)
+ foreach (var dim in tfShape.dims)
size *= dim;
}
_bufferedData = new T[size];
@@ -731,8 +738,8 @@ public Tensor GetTensor()
{
var scalar = default(T);
_srcgetter(ref scalar);
- var tensor = new Tensor(new[] { scalar });
- tensor.SetShape(_tfShape);
+ var tensor = new Tensor(new[] { scalar });
+ tensor.set_shape(_tfShape);
return tensor;
}
@@ -771,15 +778,15 @@ public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape
long size = 0;
_position = 0;
- if (tfShape.Dimensions.Length != 0)
+ if (tfShape.dims.Length != 0)
{
size = 1;
- foreach (var dim in tfShape.Dimensions)
+ foreach (var dim in tfShape.dims)
size *= dim;
}
_bufferedData = new T[size];
- if (_tfShape.Dimensions != null)
- _dims = _tfShape.Dimensions.Select(x => (long)x).ToArray();
+ if (_tfShape.dims != null)
+ _dims = _tfShape.dims.Select(x => (long)x).ToArray();
_bufferedDataSize = size;
}
@@ -792,7 +799,7 @@ public Tensor GetTensor()
// This is done to reduce memory allocation every time a tensor is created.
_denseData = new T[_vBuffer.Length];
_vBuffer.CopyTo(_denseData);
- var tensor = CastDataAndReturnAsTensor(_denseData);
+ var tensor = CastDataAndReturnAsTensor(_denseData);
return tensor;
}
diff --git a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
index 906eb2458e..4679cf2470 100644
--- a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
+++ b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
@@ -47,7 +47,7 @@ internal static DataViewSchema GetModelSchema(IExceptionContext ectx, Graph grap
continue;
// Construct the final ML.NET type of a Tensorflow variable.
- var tensorShape = op.output.TensorShape.Dimensions;
+ var tensorShape = op.output.TensorShape.dims;
var columnType = new VectorDataViewType(mlType);
if (!(Utils.Size(tensorShape) == 1 && tensorShape[0] <= 0) &&
(Utils.Size(tensorShape) > 0 && tensorShape.Skip(1).All(x => x > 0)))