StandardTrainersCatalog.OnlineGradientDescent Method
Definition
Important
Some information relates to prerelease product that may be substantially modified before it’s released. Microsoft makes no warranties, express or implied, with respect to the information provided here.
Overloads
OnlineGradientDescent(RegressionCatalog+RegressionTrainers, String, String, IRegressionLoss, Single, Boolean, Single, Int32)
Create OnlineGradientDescentTrainer, which predicts a target using a linear regression model.
OnlineGradientDescent(RegressionCatalog+RegressionTrainers, OnlineGradientDescentTrainer+Options)
Create OnlineGradientDescentTrainer using advanced options, which predicts a target using a linear regression model.
OnlineGradientDescent(RegressionCatalog+RegressionTrainers, String, String, IRegressionLoss, Single, Boolean, Single, Int32)
Create OnlineGradientDescentTrainer, which predicts a target using a linear regression model.
public static Microsoft.ML.Trainers.OnlineGradientDescentTrainer OnlineGradientDescent (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", Microsoft.ML.Trainers.IRegressionLoss lossFunction = default, float learningRate = 0.1, bool decreaseLearningRate = true, float l2Regularization = 0, int numberOfIterations = 1);
static member OnlineGradientDescent : Microsoft.ML.RegressionCatalog.RegressionTrainers * string * string * Microsoft.ML.Trainers.IRegressionLoss * single * bool * single * int -> Microsoft.ML.Trainers.OnlineGradientDescentTrainer
<Extension()>
Public Function OnlineGradientDescent (catalog As RegressionCatalog.RegressionTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional lossFunction As IRegressionLoss = Nothing, Optional learningRate As Single = 0.1, Optional decreaseLearningRate As Boolean = true, Optional l2Regularization As Single = 0, Optional numberOfIterations As Integer = 1) As OnlineGradientDescentTrainer
Parameters
- catalog
- RegressionCatalog.RegressionTrainers
The regression catalog trainer object.
- labelColumnName
- String
The name of the label column. The column data must be Single.
- featureColumnName
- String
The name of the feature column. The column data must be a known-sized vector of Single.
- lossFunction
- IRegressionLoss
The loss function minimized during training. Using SquaredLoss, for example, yields a least-squares trainer.
- learningRate
- Single
The initial learning rate used by SGD.
- decreaseLearningRate
- Boolean
Whether to decrease the learning rate as iterations progress.
- l2Regularization
- Single
The L2 weight for regularization.
- numberOfIterations
- Int32
The number of passes through the training dataset.
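For illustration, a call that sets these parameters explicitly might look like the following minimal sketch. It assumes an existing MLContext named mlContext and input data with "Label" and "Features" columns; the full runnable sample appears under Examples below.
// Minimal sketch, not part of the official sample: configure the trainer
// through the optional parameters. Assumes `mlContext` is an MLContext and
// the input data has "Label" and "Features" columns.
var trainer = mlContext.Regression.Trainers.OnlineGradientDescent(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new Microsoft.ML.Trainers.SquaredLoss(), // least-squares objective
    learningRate: 0.1f,          // initial SGD learning rate
    decreaseLearningRate: true,  // decay the rate as iterations progress
    l2Regularization: 0f,        // no L2 penalty
    numberOfIterations: 10);     // passes over the training data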
Returns
OnlineGradientDescentTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
namespace Samples.Dynamic.Trainers.Regression
{
public static class OnlineGradientDescent
{
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by the ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
var pipeline = mlContext.Regression.Trainers.OnlineGradientDescent(
labelColumnName: nameof(DataPoint.Label),
featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data.CreateEnumerable<Prediction>(
transformedTestData, reuseRowObject: false).ToList();
// Look at 5 predictions for the Label, side by side with the actual
// Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
// This trainer is not numerically stable.
// Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
PrintMetrics(metrics);
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
{
float label = (float)random.NextDouble();
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
Features = Enumerable.Repeat(label, 50).Select(
x => x + (float)random.NextDouble()).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public float Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public float Label { get; set; }
// Predicted score from the trainer.
public float Score { get; set; }
}
// Print some evaluation metrics for regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
Console.WriteLine(
"Root Mean Squared Error: " + metrics.RootMeanSquaredError);
Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
Applies to
OnlineGradientDescent(RegressionCatalog+RegressionTrainers, OnlineGradientDescentTrainer+Options)
Create OnlineGradientDescentTrainer using advanced options, which predicts a target using a linear regression model.
public static Microsoft.ML.Trainers.OnlineGradientDescentTrainer OnlineGradientDescent (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, Microsoft.ML.Trainers.OnlineGradientDescentTrainer.Options options);
static member OnlineGradientDescent : Microsoft.ML.RegressionCatalog.RegressionTrainers * Microsoft.ML.Trainers.OnlineGradientDescentTrainer.Options -> Microsoft.ML.Trainers.OnlineGradientDescentTrainer
<Extension()>
Public Function OnlineGradientDescent (catalog As RegressionCatalog.RegressionTrainers, options As OnlineGradientDescentTrainer.Options) As OnlineGradientDescentTrainer
Parameters
- catalog
- RegressionCatalog.RegressionTrainers
The regression catalog trainer object.
- options
- OnlineGradientDescentTrainer.Options
Trainer options.
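As a quick illustration (a minimal sketch only; the complete runnable sample is shown under Examples below), the options object groups the column names together with trainer-specific settings and is then passed to the catalog method. It assumes an existing MLContext named mlContext and that Microsoft.ML.Trainers is imported.
// Minimal sketch, not part of the official sample: assumes `mlContext` is an
// MLContext and Microsoft.ML.Trainers has been imported.
var options = new OnlineGradientDescentTrainer.Options
{
    LabelColumnName = "Label",
    FeatureColumnName = "Features",
    LearningRate = 0.1f,
    NumberOfIterations = 10
};
var trainer = mlContext.Regression.Trainers.OnlineGradientDescent(options);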
Returns
OnlineGradientDescentTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;
namespace Samples.Dynamic.Trainers.Regression
{
public static class OnlineGradientDescentWithOptions
{
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by the ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
var options = new OnlineGradientDescentTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Change the loss function.
LossFunction = new TweedieLoss(),
// Give an extra gain to more recent updates.
RecencyGain = 0.1f,
// Turn off lazy updates.
LazyUpdate = false,
// Specify scale for initial weights.
InitialWeightsDiameter = 0.2f
};
// Define the trainer.
var pipeline =
mlContext.Regression.Trainers.OnlineGradientDescent(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data.CreateEnumerable<Prediction>(
transformedTestData, reuseRowObject: false).ToList();
// Look at 5 predictions for the Label, side by side with the actual
// Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
// This trainer is not numerically stable.
// Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
PrintMetrics(metrics);
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
{
float label = (float)random.NextDouble();
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
Features = Enumerable.Repeat(label, 50).Select(
x => x + (float)random.NextDouble()).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public float Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public float Label { get; set; }
// Predicted score from the trainer.
public float Score { get; set; }
}
// Print some evaluation metrics for regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
Console.WriteLine(
"Root Mean Squared Error: " + metrics.RootMeanSquaredError);
Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}