package mlpack

/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_lars
#include <capi/lars.h>
#include <stdlib.h>
*/
import "C"

import "gonum.org/v1/gonum/mat"
// LarsOptionalParam holds the optional parameters accepted by Lars(). Fields
// left at their zero values are treated as "not passed".
type LarsOptionalParam struct {
  Input       *mat.Dense // Matrix of covariates (X).
  InputModel  *lars      // Trained LARS model to use instead of training one.
  Lambda1     float64    // Regularization parameter for the l1-norm penalty.
  Lambda2     float64    // Regularization parameter for the l2-norm penalty.
  NoIntercept bool       // Do not fit an intercept in the model.
  NoNormalize bool       // Do not normalize data to unit variance before modeling.
  Responses   *mat.Dense // Matrix of responses/observations (y).
  Test        *mat.Dense // Matrix of points to regress on (test points).
  UseCholesky bool       // Use Cholesky decomposition rather than the full Gram matrix.
  Verbose     bool       // Display informational messages during execution.
}
// LarsOptions returns a LarsOptionalParam with every option set to its
// default (unset) value.
func LarsOptions() *LarsOptionalParam {
  return &LarsOptionalParam{
    Input:       nil,
    InputModel:  nil,
    Lambda1:     0,
    Lambda2:     0,
    NoIntercept: false,
    NoNormalize: false,
    Responses:   nil,
    Test:        nil,
    UseCholesky: false,
    Verbose:     false,
  }
}
/*
  An implementation of LARS: Least Angle Regression (Stagewise/laSso). This is
  a stage-wise homotopy-based algorithm for L1-regularized linear regression
  (LASSO) and L1+L2-regularized linear regression (Elastic Net).

  This program is able to train a LARS/LASSO/Elastic Net model or load a model
  from file, output regression predictions for a test set, and save the trained
  model to a file. The LARS algorithm is described in more detail below:

  Let X be a matrix where each row is a point and each column is a dimension,
  and let y be a vector of targets.

  The Elastic Net problem is to solve

    min_beta 0.5 || X * beta - y ||_2^2 + lambda_1 ||beta||_1 +
        0.5 lambda_2 ||beta||_2^2

  If lambda1 > 0 and lambda2 = 0, the problem is the LASSO.
  If lambda1 > 0 and lambda2 > 0, the problem is the Elastic Net.
  If lambda1 = 0 and lambda2 > 0, the problem is ridge regression.
  If lambda1 = 0 and lambda2 = 0, the problem is unregularized linear
  regression.

  For efficiency reasons, it is not recommended to use this algorithm with
  "Lambda1" = 0. In that case, use the 'linear_regression' program, which
  implements both unregularized linear regression and ridge regression.
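
  As a sketch of the Elastic Net case (both penalties active), assuming 'data'
  and 'responses' are *mat.Dense matrices already prepared by the caller and
  the penalty values below are purely illustrative:

    // Hypothetical Elastic Net fit: Lambda1 induces sparsity, Lambda2 adds
    // ridge-style shrinkage.
    param := mlpack.LarsOptions()
    param.Input = data
    param.Responses = responses
    param.Lambda1 = 0.4
    param.Lambda2 = 0.1

    enet_model, _ := mlpack.Lars(param)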

  To train a LARS/LASSO/Elastic Net model, the "Input" and "Responses"
  parameters must be given. The "Lambda1", "Lambda2", and "UseCholesky"
  parameters control the training options. A trained model can be saved with
  the "OutputModel" output parameter. If no training is desired at all, a model
  can be passed via the "InputModel" parameter.

  The program can also provide predictions for test data using either the
  trained model or the given input model. Test points can be specified with the
  "Test" parameter. Predicted responses to the test points can be saved with
  the "OutputPredictions" output parameter.

  For example, the following command trains a model on the data 'data' and
  responses 'responses' with lambda1 set to 0.4 and lambda2 set to 0 (so, LASSO
  is being solved), and then the model is saved to 'lasso_model':

    // Initialize optional parameters for Lars().
    param := mlpack.LarsOptions()
    param.Input = data
    param.Responses = responses
    param.Lambda1 = 0.4
    param.Lambda2 = 0

    lasso_model, _ := mlpack.Lars(param)

  The following command uses the 'lasso_model' to provide predicted responses
  for the data 'test' and save those responses to 'test_predictions':

    // Initialize optional parameters for Lars().
    param := mlpack.LarsOptions()
    param.InputModel = &lasso_model
    param.Test = test

    _, test_predictions := mlpack.Lars(param)

  Input parameters:

    - Input (mat.Dense): Matrix of covariates (X).
    - InputModel (lars): Trained LARS model to use.
    - Lambda1 (float64): Regularization parameter for l1-norm penalty.
        Default value 0.
    - Lambda2 (float64): Regularization parameter for l2-norm penalty.
        Default value 0.
    - NoIntercept (bool): Do not fit an intercept in the model.
    - NoNormalize (bool): Do not normalize data to unit variance before
        modeling.
    - Responses (mat.Dense): Matrix of responses/observations (y).
    - Test (mat.Dense): Matrix containing points to regress on (test
        points).
    - UseCholesky (bool): Use Cholesky decomposition during computation
        rather than explicitly computing the full Gram matrix.
    - Verbose (bool): Display informational messages and the full list of
        parameters and timers at the end of execution.

  Output parameters:

    - outputModel (lars): Output LARS model.
    - outputPredictions (mat.Dense): If "Test" is specified, the predicted
        responses for those test points are returned here.
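
  As a sketch, the returned predictions matrix can be read back through
  gonum's mat API (assuming "fmt" is imported; the variable names below are
  illustrative):

    _, predictions := mlpack.Lars(param)
    rows, cols := predictions.Dims()
    for i := 0; i < rows; i++ {
      for j := 0; j < cols; j++ {
        fmt.Printf("%f ", predictions.At(i, j))
      }
      fmt.Println()
    }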
*/
func Lars(param *LarsOptionalParam) (lars, *mat.Dense) {
  params := getParams("lars")
  timers := getTimers()

  disableBacktrace()
  disableVerbose()

  // Detect if the parameter was passed; set if so.
  if param.Input != nil {
    gonumToArmaMat(params, "input", param.Input, true)
    setPassed(params, "input")
  }

  // Detect if the parameter was passed; set if so.
  if param.InputModel != nil {
    setLARS(params, "input_model", param.InputModel)
    setPassed(params, "input_model")
  }

  // Detect if the parameter was passed; set if so.
  if param.Lambda1 != 0 {
    setParamDouble(params, "lambda1", param.Lambda1)
    setPassed(params, "lambda1")
  }

  // Detect if the parameter was passed; set if so.
  if param.Lambda2 != 0 {
    setParamDouble(params, "lambda2", param.Lambda2)
    setPassed(params, "lambda2")
  }

  // Detect if the parameter was passed; set if so.
  if param.NoIntercept != false {
    setParamBool(params, "no_intercept", param.NoIntercept)
    setPassed(params, "no_intercept")
  }

  // Detect if the parameter was passed; set if so.
  if param.NoNormalize != false {
    setParamBool(params, "no_normalize", param.NoNormalize)
    setPassed(params, "no_normalize")
  }

  // Detect if the parameter was passed; set if so.
  if param.Responses != nil {
    gonumToArmaMat(params, "responses", param.Responses, false)
    setPassed(params, "responses")
  }

  // Detect if the parameter was passed; set if so.
  if param.Test != nil {
    gonumToArmaMat(params, "test", param.Test, true)
    setPassed(params, "test")
  }

  // Detect if the parameter was passed; set if so.
  if param.UseCholesky != false {
    setParamBool(params, "use_cholesky", param.UseCholesky)
    setPassed(params, "use_cholesky")
  }

  // Detect if the parameter was passed; set if so.
  if param.Verbose != false {
    setParamBool(params, "verbose", param.Verbose)
    setPassed(params, "verbose")
    enableVerbose()
  }

  // Mark all output options as passed.
  setPassed(params, "output_model")
  setPassed(params, "output_predictions")

  // Call the mlpack program.
  C.mlpackLars(params.mem, timers.mem)

  // Initialize result variable and get output.
  var outputModel lars
  outputModel.getLARS(params, "output_model")
  var outputPredictionsPtr mlpackArma
  outputPredictions := outputPredictionsPtr.armaToGonumMat(params, "output_predictions")

  // Clean memory.
  cleanParams(params)
  cleanTimers(timers)

  // Return output(s).
  return outputModel, outputPredictions
}