Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixed model saving and loading of OneVersusAllTrainer to include SoftMax #4472

Merged
merged 3 commits into from
Nov 14, 2019
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -364,23 +364,29 @@ private OneVersusAllModelParameters(IHostEnvironment env, ModelLoadContext ctx)
: base(env, RegistrationName, ctx)
{
// *** Binary format ***
// bool: useDist
// byte: OutputFormula as byte
Copy link
Member

@ganik ganik Nov 13, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You are changing how model is saved / loaded from disk. For backward compatibility you will need to use versioning #Resolved

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Backward compatibility isn't affected. In the old models, it stored a bool value as a byte (0 = false = RawScores, and 1 = true = Probability Normalization).
All I have done here is to add 2 = SoftMax.

The old models will continue to be read and function as before.


In reply to: 346003757 [](ancestors = 346003757)

// int: predictor count
bool useDist = ctx.Reader.ReadBoolByte();
OutputFormula outputFormula = (OutputFormula)ctx.Reader.ReadByte();
Copy link
Contributor

@justinormont justinormont Nov 12, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Very elegant reuse of the byte. #Resolved

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thank you !


In reply to: 345483370 [](ancestors = 345483370)

int len = ctx.Reader.ReadInt32();
Host.CheckDecode(len > 0);

if (useDist)
if (outputFormula == OutputFormula.Raw)
{
var predictors = new TScalarPredictor[len];
LoadPredictors(Host, predictors, ctx);
_impl = new ImplRaw(predictors);
}
else if (outputFormula == OutputFormula.ProbabilityNormalization)
{
var predictors = new IValueMapperDist[len];
LoadPredictors(Host, predictors, ctx);
_impl = new ImplDist(predictors);
}
else
else if (outputFormula == OutputFormula.Softmax)
{
var predictors = new TScalarPredictor[len];
LoadPredictors(Host, predictors, ctx);
_impl = new ImplRaw(predictors);
_impl = new ImplSoftmax(predictors);
}

DistType = new VectorDataViewType(NumberDataViewType.Single, _impl.Predictors.Length);
Expand Down Expand Up @@ -409,9 +415,10 @@ private protected override void SaveCore(ModelSaveContext ctx)
var preds = _impl.Predictors;

// *** Binary format ***
// bool: useDist
// byte: _impl.OutputFormula as byte
// int: predictor count
ctx.Writer.WriteBoolByte(_impl is ImplDist);
byte[] outputFormula = { (byte)_impl.OutputFormula };
ctx.Writer.WriteBytesNoCount(outputFormula, 1);
ctx.Writer.Write(preds.Length);

// Save other streams.
Expand Down Expand Up @@ -485,6 +492,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema)

private abstract class ImplBase : ISingleCanSavePfa
{
public OutputFormula OutputFormula;
public abstract DataViewType InputType { get; }
public abstract IValueMapper[] Predictors { get; }
public abstract bool CanSavePfa { get; }
Expand Down Expand Up @@ -536,6 +544,7 @@ internal ImplRaw(TScalarPredictor[] predictors)
CanSavePfa = Predictors.All(m => (m as ISingleCanSavePfa)?.CanSavePfa == true);
Contracts.AssertValue(inputType);
InputType = inputType;
OutputFormula = OutputFormula.Raw;
}

public override ValueMapper<VBuffer<float>, VBuffer<float>> GetMapper()
Expand Down Expand Up @@ -601,6 +610,7 @@ internal ImplDist(IValueMapperDist[] predictors)
CanSavePfa = Predictors.All(m => (m as IDistCanSavePfa)?.CanSavePfa == true);
Contracts.AssertValue(inputType);
InputType = inputType;
OutputFormula = OutputFormula.ProbabilityNormalization;
}

private bool IsValid(IValueMapperDist mapper, ref VectorDataViewType inputType)
Expand Down Expand Up @@ -712,6 +722,7 @@ internal ImplSoftmax(TScalarPredictor[] predictors)
CanSavePfa = false;
Contracts.AssertValue(inputType);
InputType = inputType;
OutputFormula = OutputFormula.Softmax;
}

public override ValueMapper<VBuffer<float>, VBuffer<float>> GetMapper()
Expand Down
4 changes: 3 additions & 1 deletion test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,9 @@ public void LightGbmMulticlassEstimatorWithOptions()
{
var options = new LightGbmMulticlassTrainer.Options
{
EvaluationMetric = LightGbmMulticlassTrainer.Options.EvaluateMetricType.Default
EvaluationMetric = LightGbmMulticlassTrainer.Options.EvaluateMetricType.Default,
UseSoftmax = true
Copy link
Contributor

@justinormont justinormont Nov 13, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You may want two unit tests total, setting it explicitly as both true and false. I believe the hyperparameter defaults to <auto>, where LightGBM decides for you. #Resolved

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed. Thanks!


In reply to: 345950381 [](ancestors = 345950381)


};

var (pipeline, dataView) = GetMulticlassPipeline();
Expand Down