4
4
5
5
def least_squares(phi, y):
    """
    Solves the least-squares (LS) problem using the normal equations.

    Parameters:
        phi (ndarray): The design matrix with dimensions (m, n).
        y (ndarray): The target values with dimensions (m,).

    Returns:
        ndarray: The estimated parameter vector with dimensions (n,).
    """
    # Work with the transposed design matrix, shape (n, m).
    phi = np.transpose(phi)

    # Normal equations: (phi phi') theta = phi y.  Solving the linear
    # system directly is more numerically stable (and cheaper) than
    # forming the explicit inverse with np.linalg.inv.
    gram = np.matmul(phi, np.transpose(phi))
    rhs = np.matmul(phi, y)
    theta = np.linalg.solve(gram, rhs)

    # Flatten and return the parameter vector.
    return np.asarray(theta).ravel()
14
28
15
29
16
30
def regularized_ls(phi, y, lamda):
    """
    Solves the regularized least-squares (RLS) problem (ridge regression).

    Parameters:
        phi (ndarray): The design matrix with dimensions (m, n).
        y (ndarray): The target values with dimensions (m,).
        lamda (float): The regularization parameter.

    Returns:
        ndarray: The estimated parameter vector with dimensions (n,).
    """
    # Work with the transposed design matrix, shape (n, m).
    phi = np.transpose(phi)

    # Ridge normal equations: (phi phi' + lamda I) theta = phi y.
    # np.eye replaces the deprecated numpy.matlib.identity, and solving
    # the system directly is more numerically stable than forming the
    # explicit inverse with np.linalg.inv.
    gram = np.matmul(phi, np.transpose(phi)) + lamda * np.eye(len(phi))
    rhs = np.matmul(phi, y)
    theta = np.linalg.solve(gram, rhs)

    # Flatten and return the parameter vector.
    return np.asarray(theta).ravel()
26
55
27
56
28
57
def l1_regularized_ls (phi , y , lamda ):
29
58
"""
30
- L1-regularized LS (LASSO).
59
+ Solves the L1-regularized least-squares (LASSO) problem using linear programming.
60
+
61
+ Parameters:
62
+ phi (ndarray): The design matrix with dimensions (m, n).
63
+ y (ndarray): The target values with dimensions (m,).
64
+ lamda (float): The regularization parameter.
65
+
66
+ Returns:
67
+ ndarray: The estimated parameter vector with dimensions (n,).
31
68
"""
69
+ # Transpose the design matrix.
32
70
phi = np .transpose (phi )
71
+
72
+ # Compute the left and right hand sides of the LASSO problem.
33
73
phi_phi = np .matmul (phi , np .transpose (phi ))
34
74
phi_y = np .matmul (phi , y )
75
+
76
+ # Construct the optimization problem using the CVXOPT library.
35
77
P = matrix (np .concatenate ((
36
78
np .concatenate ((phi_phi , - phi_phi )),
37
79
np .concatenate ((- phi_phi , phi_phi ))
@@ -40,26 +82,50 @@ def l1_regularized_ls(phi, y, lamda):
40
82
- np .concatenate ((phi_y , - phi_y )))
41
83
G = matrix (- np .matlib .identity (2 * len (phi )))
42
84
h = matrix (np .zeros ([1 , 2 * len (phi )]))
85
+
86
+ # Solve the optimization problem.
43
87
sol = solvers .qp (P , q .T , G , h .T )
88
+
89
+ # Extract the estimated parameter vector.
44
90
theta_plus = sol ["x" ][: len (phi )]
45
91
theta_minus = sol ["x" ][len (phi ) :]
46
92
theta = theta_plus - theta_minus
93
+
94
+ # Flatten and return the parameter vector.
47
95
return np .ndarray .flatten (np .array (theta ))
48
96
49
97
50
98
def robust_regression(phi, y):
    """
    Robust regression (RR) via least absolute deviations.

    Solves
        min_x  || y - phi' x ||_1
    by reformulating it as a linear program over the stacked variable
    [x; t]: minimize sum(t) subject to |y - phi' x| <= t elementwise,
    solved with the CVXOPT LP solver.

    Parameters:
        phi (ndarray): The design matrix with dimensions (m, n).
        y (ndarray): The target values with dimensions (m,).

    Returns:
        ndarray: The estimated parameter vector with dimensions (n,).
    """
    # Work with the transposed design matrix, shape (n, m).
    phi = np.transpose(phi)

    # Objective: zero cost on the n coefficients, unit cost on the m
    # slack variables t.
    c = matrix(np.concatenate((np.zeros([1, len(phi)]),
                               np.ones([1, len(y)])), axis=1))

    # Inequality constraints encoding -t <= y - phi' x <= t:
    #   [-phi'  -I] [x]    [-y]
    #   [ phi'  -I] [t] <= [ y]
    # np.eye replaces the deprecated numpy.matlib.identity.
    id_mat = -np.eye(len(y))
    G = matrix(np.concatenate((
        np.concatenate((-np.transpose(phi), np.transpose(phi))),
        np.concatenate((id_mat, id_mat))
    ), axis=1))
    h = matrix(np.concatenate((-y, y)))

    # Solve the linear program.
    sol = solvers.lp(c.T, G, h)

    # Extract the estimated coefficients (drop the slack variables).
    theta = sol["x"][:len(phi)]

    # Flatten and return the parameter vector.
    return np.ndarray.flatten(np.array(theta))
0 commit comments