Description
I have a fully working TensorFlow model and I literally just need the last step of having C# run it, but I am stuck on a NullReferenceException.
The setup is very simple, and I have locked down both the sequence length and the batch size, yet no matter what I do the prediction throws this exception:
at Microsoft.ML.Data.TypedCursorable`1.TypedRowBase.<>c__DisplayClass8_0`1.<CreateDirectVBufferSetter>b__0(TRow row)
at Microsoft.ML.Data.TypedCursorable`1.TypedRowBase.FillValues(TRow row)
at Microsoft.ML.Data.TypedCursorable`1.RowImplementation.FillValues(TRow row)
at Microsoft.ML.PredictionEngineBase`2.FillValues(TDst prediction)
at Microsoft.ML.PredictionEngine`2.Predict(TSrc example, TDst& prediction)
at MyProject.Model.Run() in
I have tested that the model works in Python, and I have made absolutely sure the dimensions match exactly.
public record Features
{
    [ColumnName("x_1")]
    [VectorType(1, 41, 3)]
    public int[,,] UnigramWindows { get; set; } = null!;

    [ColumnName("x_2")]
    [VectorType(1, 41, 3)]
    public int[,,] BigramWindows { get; set; } = null!;

    [ColumnName("x_3")]
    [VectorType(1, 41, 3)]
    public int[,,] CharTypeWindows { get; set; } = null!;

    [ColumnName("x_4")]
    [VectorType(1, 41, 41)]
    public int[,,] WordsStartingAt { get; set; } = null!;

    [ColumnName("x_5")]
    [VectorType(1, 41, 41)]
    public int[,,] WordsEndingAt { get; set; } = null!;

    [ColumnName("x")]
    [VectorType(1)]
    public int[] SeqLen { get; set; } = null!;
}
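For context, this is roughly how an input instance gets populated, matching the shapes declared above. The zero-filled arrays here are only placeholders; the real values come from my feature-extraction code, which I have left out:

// Sketch only: the actual feature values are produced by preprocessing code not shown here.
var features = new Features
{
    UnigramWindows  = new int[1, 41, 3],   // x_1
    BigramWindows   = new int[1, 41, 3],   // x_2
    CharTypeWindows = new int[1, 41, 3],   // x_3
    WordsStartingAt = new int[1, 41, 41],  // x_4
    WordsEndingAt   = new int[1, 41, 41],  // x_5
    SeqLen          = new[] { 41 },        // x
};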
private record Output
{
    [VectorType(1, 41, 6)]
    public float[,,] Identity;
}
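If the prediction ever comes back, this is how I would expect to read the scores out, assuming the [1, 41, 6] layout declared above (batch, position, class):

// Assumes Output.Identity has the declared [1, 41, 6] shape: batch x position x class.
static void DumpScores(Output output)
{
    for (var position = 0; position < 41; position++)
    {
        for (var cls = 0; cls < 6; cls++)
        {
            Console.Write($"{output.Identity[0, position, cls]:F3} ");
        }
        Console.WriteLine();
    }
}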
private static ITransformer LoadModel(
    MLContext mlContext,
    string modelPath)
{
    var tfModel = mlContext.Model
        .LoadTensorFlowModel(modelPath);

    var schema = tfModel.GetModelSchema();
    var revSchema = schema.Reverse().ToArray();

    var pipeline = tfModel
        .ScoreTensorFlowModel(
            outputColumnNames: new[] { "Identity" },
            inputColumnNames: new[]
            {
                "x",
                "x_1",
                "x_2",
                "x_3",
                "x_4",
                "x_5",
            },
            addBatchDimensionInput: false);

    var dataView = mlContext.Data.LoadFromEnumerable(Enumerable.Empty<Features>());
    ITransformer mlModel = pipeline.Fit(dataView);
    return mlModel;
}
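The schema/revSchema variables above are only there so I can inspect the tensors while debugging. For reference, here is a quick sketch of how I dump what ML.NET reports for the model's columns and types (the helper name is mine, not part of the project):

// Hypothetical helper: prints the column names and types ML.NET sees in the TF model schema.
private static void PrintTensorFlowSchema(MLContext mlContext, string modelPath)
{
    var tfModel = mlContext.Model.LoadTensorFlowModel(modelPath);
    var schema = tfModel.GetModelSchema();

    foreach (var column in schema)
    {
        Console.WriteLine($"{column.Name}: {column.Type}");
    }
}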
public static void Run()
{
    var model = LoadModel(mlContext, "model.pb");
    var predictionEngine = mlContext
        .Model
        .CreatePredictionEngine<Features, Output>(model);

    // `features` is a populated Features instance (feature extraction omitted here).
    var res = predictionEngine.Predict(features);
    Console.WriteLine(System.Text.Json.JsonSerializer.Serialize(res));
}