-
Notifications
You must be signed in to change notification settings - Fork 4.7k
/
ClassificationModel.cs
112 lines (103 loc) · 7.84 KB
/
ClassificationModel.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System;
using System.ComponentModel;
namespace Azure.ResourceManager.MachineLearning.Models
{
/// <summary> Enum for all classification models supported by AutoML. </summary>
public readonly partial struct ClassificationModel : IEquatable<ClassificationModel>
{
    private readonly string _value;

    /// <summary> Initializes a new instance of <see cref="ClassificationModel"/>. </summary>
    /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
    public ClassificationModel(string value)
    {
        _value = value ?? throw new ArgumentNullException(nameof(value));
    }

    private const string LogisticRegressionValue = "LogisticRegression";
    private const string SGDValue = "SGD";
    private const string MultinomialNaiveBayesValue = "MultinomialNaiveBayes";
    private const string BernoulliNaiveBayesValue = "BernoulliNaiveBayes";
    private const string SVMValue = "SVM";
    private const string LinearSVMValue = "LinearSVM";
    private const string KNNValue = "KNN";
    private const string DecisionTreeValue = "DecisionTree";
    private const string RandomForestValue = "RandomForest";
    private const string ExtremeRandomTreesValue = "ExtremeRandomTrees";
    private const string LightGBMValue = "LightGBM";
    private const string GradientBoostingValue = "GradientBoosting";
    private const string XGBoostClassifierValue = "XGBoostClassifier";

    /// <summary>
    /// Logistic regression is a fundamental classification technique.
    /// It belongs to the group of linear classifiers and is somewhat similar to polynomial and linear regression.
    /// Logistic regression is fast and relatively uncomplicated, and it's convenient for you to interpret the results.
    /// Although it's essentially a method for binary classification, it can also be applied to multiclass problems.
    /// </summary>
    public static ClassificationModel LogisticRegression { get; } = new ClassificationModel(LogisticRegressionValue);
    /// <summary>
    /// SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning applications
    /// to find the model parameters that correspond to the best fit between predicted and actual outputs.
    /// </summary>
    public static ClassificationModel SGD { get; } = new ClassificationModel(SGDValue);
    /// <summary>
    /// The multinomial Naive Bayes classifier is suitable for classification with discrete features (e.g., word counts for text classification).
    /// The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work.
    /// </summary>
    public static ClassificationModel MultinomialNaiveBayes { get; } = new ClassificationModel(MultinomialNaiveBayesValue);
    /// <summary> Naive Bayes classifier for multivariate Bernoulli models. </summary>
    public static ClassificationModel BernoulliNaiveBayes { get; } = new ClassificationModel(BernoulliNaiveBayesValue);
    /// <summary>
    /// A support vector machine (SVM) is a supervised machine learning model that uses classification algorithms for two-group classification problems.
    /// After giving an SVM model sets of labeled training data for each category, they're able to categorize new text.
    /// </summary>
    public static ClassificationModel SVM { get; } = new ClassificationModel(SVMValue);
    /// <summary>
    /// A support vector machine (SVM) is a supervised machine learning model that uses classification algorithms for two-group classification problems.
    /// After giving an SVM model sets of labeled training data for each category, they're able to categorize new text.
    /// Linear SVM performs best when input data is linear, i.e., data can be easily classified by drawing the straight line between classified values on a plotted graph.
    /// </summary>
    public static ClassificationModel LinearSVM { get; } = new ClassificationModel(LinearSVMValue);
    /// <summary>
    /// K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new datapoints
    /// which further means that the new data point will be assigned a value based on how closely it matches the points in the training set.
    /// </summary>
    public static ClassificationModel KNN { get; } = new ClassificationModel(KNNValue);
    /// <summary>
    /// Decision Trees are a non-parametric supervised learning method used for both classification and regression tasks.
    /// The goal is to create a model that predicts the value of a target variable by learning simple decision rules inferred from the data features.
    /// </summary>
    public static ClassificationModel DecisionTree { get; } = new ClassificationModel(DecisionTreeValue);
    /// <summary>
    /// Random forest is a supervised learning algorithm.
    /// The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging method.
    /// The general idea of the bagging method is that a combination of learning models increases the overall result.
    /// </summary>
    public static ClassificationModel RandomForest { get; } = new ClassificationModel(RandomForestValue);
    /// <summary> Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many decision trees. It is related to the widely used random forest algorithm. </summary>
    public static ClassificationModel ExtremeRandomTrees { get; } = new ClassificationModel(ExtremeRandomTreesValue);
    /// <summary> LightGBM is a gradient boosting framework that uses tree based learning algorithms. </summary>
    public static ClassificationModel LightGBM { get; } = new ClassificationModel(LightGBMValue);
    /// <summary> The technique of transiting week learners into a strong learner is called Boosting. The gradient boosting algorithm process works on this theory of execution. </summary>
    public static ClassificationModel GradientBoosting { get; } = new ClassificationModel(GradientBoostingValue);
    /// <summary> XGBoost: Extreme Gradient Boosting Algorithm. This algorithm is used for structured data where target column values can be divided into distinct class values. </summary>
    public static ClassificationModel XGBoostClassifier { get; } = new ClassificationModel(XGBoostClassifierValue);

    /// <summary> Determines if two <see cref="ClassificationModel"/> values are the same. </summary>
    public static bool operator ==(ClassificationModel left, ClassificationModel right) => left.Equals(right);
    /// <summary> Determines if two <see cref="ClassificationModel"/> values are not the same. </summary>
    public static bool operator !=(ClassificationModel left, ClassificationModel right) => !left.Equals(right);
    /// <summary> Converts a string to a <see cref="ClassificationModel"/>. </summary>
    public static implicit operator ClassificationModel(string value) => new ClassificationModel(value);

    /// <inheritdoc />
    [EditorBrowsable(EditorBrowsableState.Never)]
    public override bool Equals(object obj) => obj is ClassificationModel other && Equals(other);
    /// <inheritdoc />
    public bool Equals(ClassificationModel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
    /// <inheritdoc />
    [EditorBrowsable(EditorBrowsableState.Never)]
    // BUGFIX: Equals above compares case-insensitively, so the hash must be computed
    // case-insensitively as well; the previous default string.GetHashCode() gave
    // unequal hashes for values that compare equal (e.g. "SVM" vs "svm"), violating
    // the Equals/GetHashCode contract and breaking hash-based collections.
    public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0;
    /// <inheritdoc />
    public override string ToString() => _value;
}
}