% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/LearnerClassifCVGlmnet.R
\name{mlr_learners_classif.cv_glmnet}
\alias{mlr_learners_classif.cv_glmnet}
\alias{LearnerClassifCVGlmnet}
\title{GLM with Elastic Net Regularization Classification Learner}
\description{
Generalized linear models with elastic net regularization.
Calls \code{\link[glmnet:cv.glmnet]{glmnet::cv.glmnet()}} from package \CRANpkg{glmnet}.
The default for hyperparameter \code{family} is set to \code{"binomial"} or \code{"multinomial"},
depending on the number of classes.
}
\section{Dictionary}{
This \link{Learner} can be instantiated via the \link[mlr3misc:Dictionary]{dictionary} \link{mlr_learners} or with the associated sugar function \code{\link[=lrn]{lrn()}}:\preformatted{mlr_learners$get("classif.cv_glmnet")
lrn("classif.cv_glmnet")
}
}
\section{Meta Information}{
\itemize{
\item Task type: \dQuote{classif}
\item Predict Types: \dQuote{response}, \dQuote{prob}
\item Feature Types: \dQuote{logical}, \dQuote{integer}, \dQuote{numeric}
\item Required Packages: \CRANpkg{mlr3}, \CRANpkg{mlr3learners}, \CRANpkg{glmnet}
}
}
\section{Parameters}{
\tabular{lllll}{
Id \tab Type \tab Default \tab Range \tab Levels \cr
alignment \tab character \tab lambda \tab - \tab lambda, fraction \cr
alpha \tab numeric \tab 1 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
big \tab numeric \tab 9.9e+35 \tab \eqn{(-\infty, \infty)}{(-Inf, Inf)} \tab - \cr
devmax \tab numeric \tab 0.999 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
dfmax \tab integer \tab - \tab \eqn{[0, \infty)}{[0, Inf)} \tab - \cr
epsnr \tab numeric \tab 1e-08 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
eps \tab numeric \tab 1e-06 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
exclude \tab integer \tab - \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
exmx \tab numeric \tab 250 \tab \eqn{(-\infty, \infty)}{(-Inf, Inf)} \tab - \cr
fdev \tab numeric \tab 1e-05 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
foldid \tab list \tab NULL \tab - \tab - \cr
gamma \tab list \tab - \tab - \tab - \cr
grouped \tab logical \tab TRUE \tab - \tab TRUE, FALSE \cr
intercept \tab logical \tab TRUE \tab - \tab TRUE, FALSE \cr
keep \tab logical \tab FALSE \tab - \tab TRUE, FALSE \cr
lambda.min.ratio \tab numeric \tab - \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
lambda \tab list \tab - \tab - \tab - \cr
lower.limits \tab list \tab - \tab - \tab - \cr
maxit \tab integer \tab 100000 \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
mnlam \tab integer \tab 5 \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
mxitnr \tab integer \tab 25 \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
mxit \tab integer \tab 100 \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
nfolds \tab integer \tab 10 \tab \eqn{[3, \infty)}{[3, Inf)} \tab - \cr
nlambda \tab integer \tab 100 \tab \eqn{[1, \infty)}{[1, Inf)} \tab - \cr
offset \tab list \tab NULL \tab - \tab - \cr
parallel \tab logical \tab FALSE \tab - \tab TRUE, FALSE \cr
penalty.factor \tab list \tab - \tab - \tab - \cr
pmax \tab integer \tab - \tab \eqn{[0, \infty)}{[0, Inf)} \tab - \cr
pmin \tab numeric \tab 1e-09 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
prec \tab numeric \tab 1e-10 \tab \eqn{(-\infty, \infty)}{(-Inf, Inf)} \tab - \cr
predict.gamma \tab numeric \tab gamma.1se \tab \eqn{(-\infty, \infty)}{(-Inf, Inf)} \tab - \cr
relax \tab logical \tab FALSE \tab - \tab TRUE, FALSE \cr
s \tab numeric \tab lambda.1se \tab \eqn{[0, \infty)}{[0, Inf)} \tab - \cr
standardize \tab logical \tab TRUE \tab - \tab TRUE, FALSE \cr
standardize.response \tab logical \tab FALSE \tab - \tab TRUE, FALSE \cr
thresh \tab numeric \tab 1e-07 \tab \eqn{[0, \infty)}{[0, Inf)} \tab - \cr
trace.it \tab integer \tab 0 \tab \eqn{[0, 1]}{[0, 1]} \tab - \cr
type.gaussian \tab character \tab - \tab - \tab covariance, naive \cr
type.logistic \tab character \tab - \tab - \tab Newton, modified.Newton \cr
type.measure \tab character \tab deviance \tab - \tab deviance, class, auc, mse, mae \cr
type.multinomial \tab character \tab - \tab - \tab ungrouped, grouped \cr
upper.limits \tab list \tab - \tab - \tab - \cr
}
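
Hyperparameters from the table above can be set at construction via \code{\link[=lrn]{lrn()}}; a minimal
sketch with illustrative values for the elastic net mixing parameter \code{alpha} and the number of
cross-validation folds \code{nfolds}:\preformatted{learner = mlr3::lrn("classif.cv_glmnet", alpha = 0.5, nfolds = 5)
learner$param_set$values
}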
}
\section{Internal Encoding}{
Starting with \CRANpkg{mlr3} v0.5.0, the order of class labels is reversed prior to
model fitting to comply with the \code{\link[stats:glm]{stats::glm()}} convention that the negative class is provided
as the first factor level.
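
The positive class of a binary classification task can be inspected via the task's
\code{positive} field; a minimal sketch using the \dQuote{sonar} task shipped with
\CRANpkg{mlr3} (illustrative only):\preformatted{task = mlr3::tsk("sonar")
task$positive
}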
}
\examples{
if (requireNamespace("glmnet", quietly = TRUE)) {
learner = mlr3::lrn("classif.cv_glmnet")
print(learner)
# available parameters:
learner$param_set$ids()
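
# Minimal train/predict sketch on the binary "sonar" task shipped with mlr3
# (illustrative only):
task = mlr3::tsk("sonar")
learner$train(task)
prediction = learner$predict(task)
prediction$score()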
}
}
\references{
Friedman J, Hastie T, Tibshirani R (2010).
\dQuote{Regularization Paths for Generalized Linear Models via Coordinate Descent.}
\emph{Journal of Statistical Software}, \bold{33}(1), 1--22.
\doi{10.18637/jss.v033.i01}.
}
\seealso{
\itemize{
\item Chapter in the \href{https://mlr3book.mlr-org.com/}{mlr3book}:
\url{https://mlr3book.mlr-org.com/basics.html#learners}
\item Package \href{https://github.com/mlr-org/mlr3extralearners}{mlr3extralearners} for more learners.
\item \link[mlr3misc:Dictionary]{Dictionary} of \link[=Learner]{Learners}: \link{mlr_learners}
\item \code{as.data.table(mlr_learners)} for a table of available \link[=Learner]{Learners} in the running session (depending on the loaded packages).
\item \CRANpkg{mlr3pipelines} to combine learners with pre- and postprocessing steps.
\item Extension packages for additional task types:
\itemize{
\item \CRANpkg{mlr3proba} for probabilistic supervised regression and survival analysis.
\item \CRANpkg{mlr3cluster} for unsupervised clustering.
}
\item \CRANpkg{mlr3tuning} for tuning of hyperparameters, \CRANpkg{mlr3tuningspaces}
for established default tuning spaces.
}
Other Learner:
\code{\link{mlr_learners_classif.glmnet}},
\code{\link{mlr_learners_classif.kknn}},
\code{\link{mlr_learners_classif.lda}},
\code{\link{mlr_learners_classif.log_reg}},
\code{\link{mlr_learners_classif.multinom}},
\code{\link{mlr_learners_classif.naive_bayes}},
\code{\link{mlr_learners_classif.nnet}},
\code{\link{mlr_learners_classif.qda}},
\code{\link{mlr_learners_classif.ranger}},
\code{\link{mlr_learners_classif.svm}},
\code{\link{mlr_learners_classif.xgboost}},
\code{\link{mlr_learners_regr.cv_glmnet}},
\code{\link{mlr_learners_regr.glmnet}},
\code{\link{mlr_learners_regr.kknn}},
\code{\link{mlr_learners_regr.km}},
\code{\link{mlr_learners_regr.lm}},
\code{\link{mlr_learners_regr.ranger}},
\code{\link{mlr_learners_regr.svm}},
\code{\link{mlr_learners_regr.xgboost}},
\code{\link{mlr_learners_surv.cv_glmnet}},
\code{\link{mlr_learners_surv.glmnet}},
\code{\link{mlr_learners_surv.ranger}},
\code{\link{mlr_learners_surv.xgboost}}
}
\concept{Learner}
\section{Super classes}{
\code{\link[mlr3:Learner]{mlr3::Learner}} -> \code{\link[mlr3:LearnerClassif]{mlr3::LearnerClassif}} -> \code{LearnerClassifCVGlmnet}
}
\section{Methods}{
\subsection{Public methods}{
\itemize{
\item \href{#method-new}{\code{LearnerClassifCVGlmnet$new()}}
\item \href{#method-selected_features}{\code{LearnerClassifCVGlmnet$selected_features()}}
\item \href{#method-importance}{\code{LearnerClassifCVGlmnet$importance()}}
\item \href{#method-clone}{\code{LearnerClassifCVGlmnet$clone()}}
}
}
\if{html}{
\out{<details ><summary>Inherited methods</summary>}
\itemize{
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="base_learner">}\href{../../mlr3/html/Learner.html#method-base_learner}{\code{mlr3::Learner$base_learner()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="format">}\href{../../mlr3/html/Learner.html#method-format}{\code{mlr3::Learner$format()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="help">}\href{../../mlr3/html/Learner.html#method-help}{\code{mlr3::Learner$help()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="predict">}\href{../../mlr3/html/Learner.html#method-predict}{\code{mlr3::Learner$predict()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="predict_newdata">}\href{../../mlr3/html/Learner.html#method-predict_newdata}{\code{mlr3::Learner$predict_newdata()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="print">}\href{../../mlr3/html/Learner.html#method-print}{\code{mlr3::Learner$print()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="reset">}\href{../../mlr3/html/Learner.html#method-reset}{\code{mlr3::Learner$reset()}}\out{</span>}
\item \out{<span class="pkg-link" data-pkg="mlr3" data-topic="Learner" data-id="train">}\href{../../mlr3/html/Learner.html#method-train}{\code{mlr3::Learner$train()}}\out{</span>}
}
\out{</details>}
}
\if{html}{\out{<hr>}}
\if{html}{\out{<a id="method-new"></a>}}
\if{latex}{\out{\hypertarget{method-new}{}}}
\subsection{Method \code{new()}}{
Creates a new instance of this \link[R6:R6Class]{R6} class.
\subsection{Usage}{
\if{html}{\out{<div class="r">}}\preformatted{LearnerClassifCVGlmnet$new()}\if{html}{\out{</div>}}
}
}
\if{html}{\out{<hr>}}
\if{html}{\out{<a id="method-selected_features"></a>}}
\if{latex}{\out{\hypertarget{method-selected_features}{}}}
\subsection{Method \code{selected_features()}}{
Returns the set of selected features as reported by \code{\link[glmnet:predict.glmnet]{glmnet::predict.glmnet()}}
with \code{type} set to \code{"nonzero"}.
\subsection{Usage}{
\if{html}{\out{<div class="r">}}\preformatted{LearnerClassifCVGlmnet$selected_features(lambda = NULL)}\if{html}{\out{</div>}}
}
\subsection{Arguments}{
\if{html}{\out{<div class="arguments">}}
\describe{
\item{\code{lambda}}{(\code{numeric(1)})\cr
Custom \code{lambda}; defaults to the active lambda as determined by the parameter set.}
}
\if{html}{\out{</div>}}
}
\subsection{Returns}{
(\code{character()}) of feature names.
}
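\subsection{Example}{
A minimal usage sketch (assumes the learner was trained as in the examples above;
a custom \code{lambda} may also be supplied):\preformatted{learner$selected_features()
}
}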
}
\if{html}{\out{<hr>}}
\if{html}{\out{<a id="method-importance"></a>}}
\if{latex}{\out{\hypertarget{method-importance}{}}}
\subsection{Method \code{importance()}}{
Returns importance scores, calculated from the path of lambda values.
First, the largest \code{lambda} value at which each feature first enters the model
with a nonzero coefficient is determined.
Second, the \code{\link[=rank]{rank()}} of these lambda values is calculated (averaging ties)
and returned as importance scores.
\subsection{Usage}{
\if{html}{\out{<div class="r">}}\preformatted{LearnerClassifCVGlmnet$importance()}\if{html}{\out{</div>}}
}
\subsection{Returns}{
(named \code{numeric()}) of importance scores.
}
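\subsection{Example}{
A minimal usage sketch (assumes the learner was trained as in the examples above):\preformatted{importance = learner$importance()
head(sort(importance, decreasing = TRUE))
}
}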
}
\if{html}{\out{<hr>}}
\if{html}{\out{<a id="method-clone"></a>}}
\if{latex}{\out{\hypertarget{method-clone}{}}}
\subsection{Method \code{clone()}}{
The objects of this class are cloneable with this method.
\subsection{Usage}{
\if{html}{\out{<div class="r">}}\preformatted{LearnerClassifCVGlmnet$clone(deep = FALSE)}\if{html}{\out{</div>}}
}
\subsection{Arguments}{
\if{html}{\out{<div class="arguments">}}
\describe{
\item{\code{deep}}{Whether to make a deep clone.}
}
\if{html}{\out{</div>}}
}
}
}