@@ -36,7 +36,7 @@ impl LinearRegression {
     /// to match the training data distribution.
     ///
     /// `self` is modified in place, nothing is returned.
-    pub fn fit<A, B>(&mut self, X: ArrayBase<A, Ix2>, y: ArrayBase<B, Ix1>)
+    pub fn fit<A, B>(&mut self, X: &ArrayBase<A, Ix2>, y: &ArrayBase<B, Ix1>)
     where
         A: Data<Elem = f64>,
         B: Data<Elem = f64>,
@@ -50,7 +50,7 @@ impl LinearRegression {
         self.beta = if self.fit_intercept {
             let dummy_column: Array<f64, _> = Array::ones((n_samples, 1));
             let X = stack(Axis(1), &[dummy_column.view(), X.view()]).unwrap();
-            Some(LinearRegression::solve_normal_equation(X, y))
+            Some(LinearRegression::solve_normal_equation(&X, y))
         } else {
             Some(LinearRegression::solve_normal_equation(X, y))
         };
@@ -77,13 +77,13 @@ impl LinearRegression {
         }
     }

-    fn solve_normal_equation<A, B>(X: ArrayBase<A, Ix2>, y: ArrayBase<B, Ix1>) -> Array1<f64>
+    fn solve_normal_equation<A, B>(X: &ArrayBase<A, Ix2>, y: &ArrayBase<B, Ix1>) -> Array1<f64>
     where
         A: Data<Elem = f64>,
         B: Data<Elem = f64>,
     {
-        let rhs = X.t().dot(&y);
-        let linear_operator = X.t().dot(&X);
+        let rhs = X.t().dot(y);
+        let linear_operator = X.t().dot(X);
         linear_operator.solve_into(rhs).unwrap()
     }
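
The change is mechanical but caller-visible: `fit` and `solve_normal_equation` now borrow the design matrix and target instead of consuming them, so the training data stays owned by the caller and the inner `dot` calls no longer need an extra `&`. Below is a minimal standalone sketch (not the crate's actual API surface) of the borrowed normal-equation solve, beta = (XᵀX)⁻¹ Xᵀy, mirroring the new signature outside the `LinearRegression` struct; it assumes the `ndarray` and `ndarray-linalg` crates with a LAPACK backend enabled.

```rust
// Sketch only: mirrors the new by-reference `solve_normal_equation`
// signature from this commit, as a free function.
use ndarray::{array, Array1, Array2, ArrayBase, Data, Ix1, Ix2};
use ndarray_linalg::Solve;

// Solves the normal equation (XᵀX) beta = Xᵀy, borrowing X and y.
fn solve_normal_equation<A, B>(x: &ArrayBase<A, Ix2>, y: &ArrayBase<B, Ix1>) -> Array1<f64>
where
    A: Data<Elem = f64>,
    B: Data<Elem = f64>,
{
    // `x` and `y` are already references, so no extra `&` is needed in `dot`.
    let rhs = x.t().dot(y);
    let linear_operator = x.t().dot(x);
    linear_operator.solve_into(rhs).unwrap()
}

fn main() {
    // y = 1 + 2x, with the intercept encoded as an explicit column of ones
    // (what `fit` builds via `stack` when `fit_intercept` is true).
    let x: Array2<f64> = array![[1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0]];
    let y: Array1<f64> = array![3.0, 5.0, 7.0, 9.0];

    let beta = solve_normal_equation(&x, &y);
    println!("beta = {}", beta); // approximately [1.0, 2.0]

    // Because the solver only borrows, `x` and `y` are still owned here
    // and can be reused without cloning.
    println!("n_samples = {}", x.nrows());
}
```

Since the data is only borrowed, `x` and `y` remain usable after fitting, e.g. for predictions on the training set, which appears to be the motivation for the by-reference signatures.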