using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Problems.GeneticProgramming.GlucosePrediction {
  [StorableClass]
  [Item("Solution", "")]
  // almost a complete copy of RegressionSolutionBase and RegressionSolution
  // only change: skipping missing values in the target
  public sealed class Solution : DataAnalysisSolution, IRegressionSolution {
    private const string TrainingMeanSquaredErrorResultName = "Mean squared error (training)";
    private const string TestMeanSquaredErrorResultName = "Mean squared error (test)";
    private const string TrainingMeanAbsoluteErrorResultName = "Mean absolute error (training)";
    private const string TestMeanAbsoluteErrorResultName = "Mean absolute error (test)";
    private const string TrainingSquaredCorrelationResultName = "Pearson's R² (training)";
    private const string TestSquaredCorrelationResultName = "Pearson's R² (test)";
    private const string TrainingRelativeErrorResultName = "Average relative error (training)";
    private const string TestRelativeErrorResultName = "Average relative error (test)";
    private const string TrainingNormalizedMeanSquaredErrorResultName = "Normalized mean squared error (training)";
    private const string TestNormalizedMeanSquaredErrorResultName = "Normalized mean squared error (test)";
    private const string TrainingRootMeanSquaredErrorResultName = "Root mean squared error (training)";
    private const string TestRootMeanSquaredErrorResultName = "Root mean squared error (test)";

    private const string TrainingMeanSquaredErrorResultDescription = "Mean of squared errors of the model on the training partition";
    private const string TestMeanSquaredErrorResultDescription = "Mean of squared errors of the model on the test partition";
    private const string TrainingMeanAbsoluteErrorResultDescription = "Mean of absolute errors of the model on the training partition";
    private const string TestMeanAbsoluteErrorResultDescription = "Mean of absolute errors of the model on the test partition";
    private const string TrainingSquaredCorrelationResultDescription = "Squared Pearson's correlation coefficient of the model output and the actual values on the training partition";
    private const string TestSquaredCorrelationResultDescription = "Squared Pearson's correlation coefficient of the model output and the actual values on the test partition";
    private const string TrainingRelativeErrorResultDescription = "Average of the relative errors of the model output and the actual values on the training partition";
    private const string TestRelativeErrorResultDescription = "Average of the relative errors of the model output and the actual values on the test partition";
    private const string TrainingNormalizedMeanSquaredErrorResultDescription = "Normalized mean of squared errors of the model on the training partition";
    private const string TestNormalizedMeanSquaredErrorResultDescription = "Normalized mean of squared errors of the model on the test partition";
    private const string TrainingRootMeanSquaredErrorResultDescription = "Root mean of squared errors of the model on the training partition";
    private const string TestRootMeanSquaredErrorResultDescription = "Root mean of squared errors of the model on the test partition";

    [StorableConstructor]
    public Solution(bool deserializing) : base(deserializing) { }
    public Solution(Solution original, Cloner cloner) : base(original, cloner) { }

    public Solution(IRegressionModel model, IRegressionProblemData problemData)
      : base(model, problemData) {
      Add(new Result(TrainingMeanSquaredErrorResultName, TrainingMeanSquaredErrorResultDescription, new DoubleValue()));
      Add(new Result(TestMeanSquaredErrorResultName, TestMeanSquaredErrorResultDescription, new DoubleValue()));
      Add(new Result(TrainingMeanAbsoluteErrorResultName, TrainingMeanAbsoluteErrorResultDescription, new DoubleValue()));
      Add(new Result(TestMeanAbsoluteErrorResultName, TestMeanAbsoluteErrorResultDescription, new DoubleValue()));
      Add(new Result(TrainingSquaredCorrelationResultName, TrainingSquaredCorrelationResultDescription, new DoubleValue()));
      Add(new Result(TestSquaredCorrelationResultName, TestSquaredCorrelationResultDescription, new DoubleValue()));
      Add(new Result(TrainingRelativeErrorResultName, TrainingRelativeErrorResultDescription, new PercentValue()));
      Add(new Result(TestRelativeErrorResultName, TestRelativeErrorResultDescription, new PercentValue()));
      Add(new Result(TrainingNormalizedMeanSquaredErrorResultName, TrainingNormalizedMeanSquaredErrorResultDescription, new DoubleValue()));
      Add(new Result(TestNormalizedMeanSquaredErrorResultName, TestNormalizedMeanSquaredErrorResultDescription, new DoubleValue()));
      Add(new Result(TrainingRootMeanSquaredErrorResultName, TrainingRootMeanSquaredErrorResultDescription, new DoubleValue()));
      Add(new Result(TestRootMeanSquaredErrorResultName, TestRootMeanSquaredErrorResultDescription, new DoubleValue()));
      CalculateRegressionResults();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new Solution(this, cloner);
    }

    protected override void RecalculateResults() {
      CalculateRegressionResults();
    }

    private void CalculateRegressionResults() {
      IEnumerable<double> estimatedTrainingValues = EstimatedTrainingValues; // cache values
      IEnumerable<double> originalTrainingValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices);
      IEnumerable<double> estimatedTestValues = EstimatedTestValues; // cache values
      IEnumerable<double> originalTestValues = ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TestIndices);

      // only take predictions for which the target is not NaN
      var selectedTrainingTuples = originalTrainingValues.Zip(estimatedTrainingValues, Tuple.Create).Where(t => !double.IsNaN(t.Item1)).ToArray();
      originalTrainingValues = selectedTrainingTuples.Select(t => t.Item1);
      estimatedTrainingValues = selectedTrainingTuples.Select(t => t.Item2);
      var selectedTestTuples = originalTestValues.Zip(estimatedTestValues, Tuple.Create).Where(t => !double.IsNaN(t.Item1)).ToArray();
      originalTestValues = selectedTestTuples.Select(t => t.Item1);
      estimatedTestValues = selectedTestTuples.Select(t => t.Item2);
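      // illustrative example (hypothetical values): for targets [90, NaN, 110] and
      // estimates [95, 100, 105] only the pairs (90, 95) and (110, 105) are kept, so rows
      // with a missing target value do not contribute to any of the quality measures below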

      OnlineCalculatorError errorState;
      double trainingMSE = OnlineMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
      TrainingMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingMSE : double.NaN;
      double testMSE = OnlineMeanSquaredErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
      TestMeanSquaredError = errorState == OnlineCalculatorError.None ? testMSE : double.NaN;

      double trainingMAE = OnlineMeanAbsoluteErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
      TrainingMeanAbsoluteError = errorState == OnlineCalculatorError.None ? trainingMAE : double.NaN;
      double testMAE = OnlineMeanAbsoluteErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
      TestMeanAbsoluteError = errorState == OnlineCalculatorError.None ? testMAE : double.NaN;

      double trainingR = OnlinePearsonsRCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
      TrainingRSquared = errorState == OnlineCalculatorError.None ? trainingR * trainingR : double.NaN;
      double testR = OnlinePearsonsRCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
      TestRSquared = errorState == OnlineCalculatorError.None ? testR * testR : double.NaN;

      double trainingRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
      TrainingRelativeError = errorState == OnlineCalculatorError.None ? trainingRelError : double.NaN;
      double testRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
      TestRelativeError = errorState == OnlineCalculatorError.None ? testRelError : double.NaN;

      double trainingNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(originalTrainingValues, estimatedTrainingValues, out errorState);
      TrainingNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingNMSE : double.NaN;
      double testNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(originalTestValues, estimatedTestValues, out errorState);
      TestNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? testNMSE : double.NaN;

      TrainingRootMeanSquaredError = Math.Sqrt(TrainingMeanSquaredError);
      TestRootMeanSquaredError = Math.Sqrt(TestMeanSquaredError);
    }

    public new IRegressionModel Model {
      get { return (IRegressionModel)base.Model; }
      private set { base.Model = value; }
    }

    public new IRegressionProblemData ProblemData {
      get { return (IRegressionProblemData)base.ProblemData; }
      set { base.ProblemData = value; }
    }

    public IEnumerable<double> EstimatedValues {
      get { return GetEstimatedValues(Enumerable.Range(0, ProblemData.Dataset.Rows)); }
    }

    public IEnumerable<double> EstimatedTrainingValues {
      get {
        var all = EstimatedValues.ToArray();
        return ProblemData.TrainingIndices.Select(r => all[r]);
      }
    }

    public IEnumerable<double> EstimatedTestValues {
      get {
        var all = EstimatedValues.ToArray();
        return ProblemData.TestIndices.Select(r => all[r]);
      }
    }

    public IEnumerable<double> GetEstimatedValues(IEnumerable<int> rows) {
      var all = Model.GetEstimatedValues(ProblemData.Dataset, ProblemData.AllIndices).ToArray();
      return rows.Select(r => all[r]);
    }
    #region Results
    public double TrainingMeanSquaredError {
      get { return ((DoubleValue)this[TrainingMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingMeanSquaredErrorResultName].Value).Value = value; }
    }
    public double TestMeanSquaredError {
      get { return ((DoubleValue)this[TestMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TestMeanSquaredErrorResultName].Value).Value = value; }
    }
    public double TrainingMeanAbsoluteError {
      get { return ((DoubleValue)this[TrainingMeanAbsoluteErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingMeanAbsoluteErrorResultName].Value).Value = value; }
    }
    public double TestMeanAbsoluteError {
      get { return ((DoubleValue)this[TestMeanAbsoluteErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TestMeanAbsoluteErrorResultName].Value).Value = value; }
    }
    public double TrainingRSquared {
      get { return ((DoubleValue)this[TrainingSquaredCorrelationResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingSquaredCorrelationResultName].Value).Value = value; }
    }
    public double TestRSquared {
      get { return ((DoubleValue)this[TestSquaredCorrelationResultName].Value).Value; }
      private set { ((DoubleValue)this[TestSquaredCorrelationResultName].Value).Value = value; }
    }
    public double TrainingRelativeError {
      get { return ((DoubleValue)this[TrainingRelativeErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingRelativeErrorResultName].Value).Value = value; }
    }
    public double TestRelativeError {
      get { return ((DoubleValue)this[TestRelativeErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TestRelativeErrorResultName].Value).Value = value; }
    }
    public double TrainingNormalizedMeanSquaredError {
      get { return ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value = value; }
    }
    public double TestNormalizedMeanSquaredError {
      get { return ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value = value; }
    }
    public double TrainingRootMeanSquaredError {
      get { return ((DoubleValue)this[TrainingRootMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TrainingRootMeanSquaredErrorResultName].Value).Value = value; }
    }
    public double TestRootMeanSquaredError {
      get { return ((DoubleValue)this[TestRootMeanSquaredErrorResultName].Value).Value; }
      private set { ((DoubleValue)this[TestRootMeanSquaredErrorResultName].Value).Value = value; }
    }
    #endregion
  }
}
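
// Usage sketch (illustrative only, assuming a trained IRegressionModel `model` and a matching
// IRegressionProblemData `problemData` created elsewhere; the variable names are hypothetical):
//
//   var solution = new Solution(model, problemData);   // all quality results are computed in the constructor
//   double trainingMse = solution.TrainingMeanSquaredError;
//   double testRmse = solution.TestRootMeanSquaredError;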