-
Notifications
You must be signed in to change notification settings - Fork 1.9k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Fixed model saving and loading of OneVersusAllTrainer to include SoftMax #4472
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -364,23 +364,29 @@ private OneVersusAllModelParameters(IHostEnvironment env, ModelLoadContext ctx) | |
: base(env, RegistrationName, ctx) | ||
{ | ||
// *** Binary format *** | ||
// bool: useDist | ||
// byte: OutputFormula as byte | ||
// int: predictor count | ||
bool useDist = ctx.Reader.ReadBoolByte(); | ||
OutputFormula outputFormula = (OutputFormula)ctx.Reader.ReadByte(); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Very elegant reuse of the byte. #Resolved There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. |
||
int len = ctx.Reader.ReadInt32(); | ||
Host.CheckDecode(len > 0); | ||
|
||
if (useDist) | ||
if (outputFormula == OutputFormula.Raw) | ||
{ | ||
var predictors = new TScalarPredictor[len]; | ||
LoadPredictors(Host, predictors, ctx); | ||
_impl = new ImplRaw(predictors); | ||
} | ||
else if (outputFormula == OutputFormula.ProbabilityNormalization) | ||
{ | ||
var predictors = new IValueMapperDist[len]; | ||
LoadPredictors(Host, predictors, ctx); | ||
_impl = new ImplDist(predictors); | ||
} | ||
else | ||
else if (outputFormula == OutputFormula.Softmax) | ||
{ | ||
var predictors = new TScalarPredictor[len]; | ||
LoadPredictors(Host, predictors, ctx); | ||
_impl = new ImplRaw(predictors); | ||
_impl = new ImplSoftmax(predictors); | ||
} | ||
|
||
DistType = new VectorDataViewType(NumberDataViewType.Single, _impl.Predictors.Length); | ||
|
@@ -409,9 +415,10 @@ private protected override void SaveCore(ModelSaveContext ctx) | |
var preds = _impl.Predictors; | ||
|
||
// *** Binary format *** | ||
// bool: useDist | ||
// byte: _impl.OutputFormula as byte | ||
// int: predictor count | ||
ctx.Writer.WriteBoolByte(_impl is ImplDist); | ||
byte[] outputFormula = { (byte)_impl.OutputFormula }; | ||
ctx.Writer.WriteBytesNoCount(outputFormula, 1); | ||
ctx.Writer.Write(preds.Length); | ||
|
||
// Save other streams. | ||
|
@@ -485,6 +492,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) | |
|
||
private abstract class ImplBase : ISingleCanSavePfa | ||
{ | ||
public OutputFormula OutputFormula; | ||
public abstract DataViewType InputType { get; } | ||
public abstract IValueMapper[] Predictors { get; } | ||
public abstract bool CanSavePfa { get; } | ||
|
@@ -536,6 +544,7 @@ internal ImplRaw(TScalarPredictor[] predictors) | |
CanSavePfa = Predictors.All(m => (m as ISingleCanSavePfa)?.CanSavePfa == true); | ||
Contracts.AssertValue(inputType); | ||
InputType = inputType; | ||
OutputFormula = OutputFormula.Raw; | ||
} | ||
|
||
public override ValueMapper<VBuffer<float>, VBuffer<float>> GetMapper() | ||
|
@@ -601,6 +610,7 @@ internal ImplDist(IValueMapperDist[] predictors) | |
CanSavePfa = Predictors.All(m => (m as IDistCanSavePfa)?.CanSavePfa == true); | ||
Contracts.AssertValue(inputType); | ||
InputType = inputType; | ||
OutputFormula = OutputFormula.ProbabilityNormalization; | ||
} | ||
|
||
private bool IsValid(IValueMapperDist mapper, ref VectorDataViewType inputType) | ||
|
@@ -712,6 +722,7 @@ internal ImplSoftmax(TScalarPredictor[] predictors) | |
CanSavePfa = false; | ||
Contracts.AssertValue(inputType); | ||
InputType = inputType; | ||
OutputFormula = OutputFormula.Softmax; | ||
} | ||
|
||
public override ValueMapper<VBuffer<float>, VBuffer<float>> GetMapper() | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -308,7 +308,9 @@ public void LightGbmMulticlassEstimatorWithOptions() | |
{ | ||
var options = new LightGbmMulticlassTrainer.Options | ||
{ | ||
EvaluationMetric = LightGbmMulticlassTrainer.Options.EvaluateMetricType.Default | ||
EvaluationMetric = LightGbmMulticlassTrainer.Options.EvaluateMetricType.Default, | ||
UseSoftmax = true | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. You may want two unit tests total; both setting it explicitly as There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. |
||
|
||
}; | ||
|
||
var (pipeline, dataView) = GetMulticlassPipeline(); | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
You are changing how model is saved / loaded from disk. For backward compatibility you will need to use versioning #Resolved
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Backward compatibility isn't affected. In the old models, the value stored was a bool serialized as a byte (0 = false = Raw scores, 1 = true = ProbabilityNormalization).
All I have done here is add 2 = Softmax, so the byte values written by old models keep their original meaning.
Old models will continue to load and function exactly as before.
In reply to: 346003757 [](ancestors = 346003757)