Skip to content

Upgrade all regressors to use TT #3319

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 27 commits into from
Apr 16, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -6,40 +6,40 @@

namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForest
public static class FastForestRegression
{
// This example requires installation of additional NuGet package
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for exception tracking and logging,
// as a catalog of available operations and as the source of randomness.
// Setting the seed to a fixed number in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);

// Create a list of training examples.
var examples = GenerateRandomDataPoints(1000);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);

// Convert the examples list to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(examples);
// Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

// Define the trainer.
var pipeline = mlContext.Regression.Trainers.FastForest();
var pipeline = mlContext.Regression.Trainers.FastForest(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));

// Train the model.
var model = pipeline.Fit(trainingData);

// Create testing examples. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
// Create testing data. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));

// Run the model on test data set.
var transformedTestData = model.Transform(testData);

// Convert IDataView object to a list.
var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();

// Look at 5 predictions
foreach (var p in predictions.Take(5))
// Look at 5 predictions for the Label, side by side with the actual Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

// Expected output:
Expand All @@ -51,27 +51,26 @@ public static void Example()

// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
Microsoft.ML.SamplesUtils.ConsoleUtils.PrintMetrics(metrics);
PrintMetrics(metrics);

// Expected output:
// Mean Absolute Error: 0.06
// Mean Squared Error: 0.01
// Mean Squared Error: 0.00
// Root Mean Squared Error: 0.07
// RSquared: 0.93
// RSquared: 0.96 (closer to 1 is better. The worst case is 0)
}

private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat();
float label = (float)random.NextDouble();
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with label.
Features = Enumerable.Repeat(label, 50).Select(x => x + randomFloat()).ToArray()
// Create random features that are correlated with the label.
Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
};
}
}
Expand All @@ -92,5 +91,15 @@ private class Prediction
// Predicted score from the trainer.
public float Score { get; set; }
}

// Print some evaluation metrics for regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
}
}
}
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>

<#+
string ClassHeader = @"// This example requires installation of additional NuGet package
// <a href=""https://www.nuget.org/packages/Microsoft.ML.FastTree/"">Microsoft.ML.FastTree</a>. ";

string ClassName="FastForestRegression";
string ExtraUsing = null;
string Trainer = @"FastForest(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;

string ExpectedOutputPerInstance= @"// Expected output:
// Label: 0.985, Prediction: 0.864
// Label: 0.155, Prediction: 0.164
// Label: 0.515, Prediction: 0.470
// Label: 0.566, Prediction: 0.501
// Label: 0.096, Prediction: 0.138";

string ExpectedOutput = @"// Expected output:
// Mean Absolute Error: 0.06
// Mean Squared Error: 0.00
// Root Mean Squared Error: 0.07
// RSquared: 0.96 (closer to 1 is better. The worst case is 0)";
#>
Original file line number Diff line number Diff line change
Expand Up @@ -7,26 +7,28 @@

namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForestWithOptions
public static class FastForestWithOptionsRegression
{
// This example requires installation of additional NuGet package
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for exception tracking and logging,
// as a catalog of available operations and as the source of randomness.
// Setting the seed to a fixed number in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);

// Create a list of training examples.
var examples = GenerateRandomDataPoints(1000);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);

// Convert the examples list to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(examples);
// Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

// Define trainer options.
var options = new FastForestRegressionTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Only use 80% of features to reduce over-fitting.
FeatureFraction = 0.8,
// Create a simpler model by penalizing usage of new features.
Expand All @@ -41,17 +43,17 @@ public static void Example()
// Train the model.
var model = pipeline.Fit(trainingData);

// Create testing examples. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
// Create testing data. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));

// Run the model on test data set.
var transformedTestData = model.Transform(testData);

// Convert IDataView object to a list.
var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();

// Look at 5 predictions
foreach (var p in predictions.Take(5))
// Look at 5 predictions for the Label, side by side with the actual Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

// Expected output:
Expand All @@ -63,27 +65,26 @@ public static void Example()

// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
Microsoft.ML.SamplesUtils.ConsoleUtils.PrintMetrics(metrics);
PrintMetrics(metrics);

// Expected output:
// Mean Absolute Error: 0.06
// Mean Squared Error: 0.01
// Root Mean Squared Error: 0.08
// RSquared: 0.93
// Root Mean Squared Error: 0.07
// RSquared: 0.95 (closer to 1 is better. The worst case is 0)
}

private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat();
float label = (float)random.NextDouble();
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with label.
Features = Enumerable.Repeat(label, 50).Select(x => x + randomFloat()).ToArray()
// Create random features that are correlated with the label.
Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
};
}
}
Expand All @@ -104,5 +105,15 @@ private class Prediction
// Predicted score from the trainer.
public float Score { get; set; }
}

// Print some evaluation metrics for regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
}
}
}
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>

<#+
string ClassHeader = @"// This example requires installation of additional NuGet package
// <a href=""https://www.nuget.org/packages/Microsoft.ML.FastTree/"">Microsoft.ML.FastTree</a>. ";

string ClassName="FastForestWithOptionsRegression";
string ExtraUsing = "using Microsoft.ML.Trainers.FastTree;";
string Trainer = @"FastForest";
string TrainerOptions = @"FastForestRegressionTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Only use 80% of features to reduce over-fitting.
FeatureFraction = 0.8,
// Create a simpler model by penalizing usage of new features.
Copy link
Member

@sfilipi sfilipi Apr 15, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Create a simpler model by penalizing usage of new features. [](start = 20, length = 58)

great idea, on putting on comments. #Resolved

FeatureFirstUsePenalty = 0.1,
// Reduce the number of trees to 50.
NumberOfTrees = 50
}";

string ExpectedOutputPerInstance= @"// Expected output:
// Label: 0.985, Prediction: 0.866
// Label: 0.155, Prediction: 0.171
// Label: 0.515, Prediction: 0.470
// Label: 0.566, Prediction: 0.476
// Label: 0.096, Prediction: 0.140";

string ExpectedOutput = @"// Expected output:
// Mean Absolute Error: 0.06
// Mean Squared Error: 0.01
// Root Mean Squared Error: 0.07
// RSquared: 0.95 (closer to 1 is better. The worst case is 0)";
#>
Original file line number Diff line number Diff line change
Expand Up @@ -6,40 +6,40 @@

namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTree
public static class FastTreeRegression
{
// This example requires installation of additional NuGet package
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
// <a href="https://www.nuget.org/packages/Microsoft.ML.FastTree/">Microsoft.ML.FastTree</a>.
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for exception tracking and logging,
// as a catalog of available operations and as the source of randomness.
// Setting the seed to a fixed number in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);

// Create a list of training examples.
var examples = GenerateRandomDataPoints(1000);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);

// Convert the examples list to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(examples);
// Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

// Define the trainer.
var pipeline = mlContext.Regression.Trainers.FastTree();
var pipeline = mlContext.Regression.Trainers.FastTree(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));

// Train the model.
var model = pipeline.Fit(trainingData);

// Create testing examples. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
// Create testing data. Use different random seed to make it different from training data.
var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));

// Run the model on test data set.
var transformedTestData = model.Transform(testData);

// Convert IDataView object to a list.
var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();

// Look at 5 predictions
foreach (var p in predictions.Take(5))
// Look at 5 predictions for the Label, side by side with the actual Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

// Expected output:
Expand All @@ -51,27 +51,26 @@ public static void Example()

// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
Microsoft.ML.SamplesUtils.ConsoleUtils.PrintMetrics(metrics);
PrintMetrics(metrics);

// Expected output:
// Mean Absolute Error: 0.05
// Mean Absolute Error: 0.03
// Mean Squared Error: 0.00
// Root Mean Squared Error: 0.06
// RSquared: 0.95
// Root Mean Squared Error: 0.03
// RSquared: 0.99 (closer to 1 is better. The worst case is 0)
}

private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat();
float label = (float)random.NextDouble();
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with label.
Features = Enumerable.Repeat(label, 50).Select(x => x + randomFloat()).ToArray()
// Create random features that are correlated with the label.
Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
};
}
}
Expand All @@ -92,5 +91,15 @@ private class Prediction
// Predicted score from the trainer.
public float Score { get; set; }
}

// Print some evaluation metrics for regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
}
}
}
}

Loading