Skip to content

Commit 787d429

Browse files
committed
add travis build
1 parent 6d8e842 commit 787d429

File tree

4 files changed: +18 −7 lines changed

.travis.yml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
language: node_js
2+
3+
node_js:
4+
- stable
5+
6+
install:
7+
- npm install
8+
9+
script:
10+
- npm test

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
# Multivariate Linear Regression with Gradient Descent
22

3+
[![Build Status](https://travis-ci.org/javascript-machine-learning/multivariate-linear-regression-gradient-descent-javascript.svg?branch=master)](https://travis-ci.org/javascript-machine-learning/multivariate-linear-regression-gradient-descent-javascript)
4+
35
This example project demonstrates how the [gradient descent](http://en.wikipedia.org/wiki/Gradient_descent) algorithm may be used to solve a [multivariate linear regression](http://en.wikipedia.org/wiki/Linear_regression) problem.
46

57
[Read more about it here.](https://www.robinwieruch.de/multivariate-linear-regression-gradient-descent-javascript/)

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@
44
"description": "",
55
"main": "index.js",
66
"scripts": {
7-
"start": "nodemon src/index.js --exec babel-node --presets es2015,stage-2"
7+
"start": "nodemon src/index.js --exec babel-node --presets es2015,stage-2",
8+
"test": "echo \"No test specified\" && exit 0"
89
},
910
"keywords": [],
1011
"author": "",

src/index.js

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,8 @@ import math from 'mathjs';
22
import csvToMatrix from 'csv-to-array-matrix';
33

44
import {
5-
getDimensionSize,
65
getMeanAsRowVector,
76
getStdAsRowVector,
8-
pushVector,
97
} from 'mathjs-util';
108

119
csvToMatrix('./src/data.csv', init);
@@ -22,7 +20,7 @@ function init(matrix) {
2220
matrix,
2321
});
2422

25-
let m = getDimensionSize(y, 1);
23+
let m = y.length;
2624

2725
// Part 1: Feature Normalization
2826
console.log('Part 1: Feature Normalization ...\n');
@@ -40,7 +38,7 @@ function init(matrix) {
4038
console.log('Part 2: Gradient Descent ...\n');
4139

4240
// Add Intercept Term
43-
XNorm = pushVector(XNorm, 0, math.ones([m, 1]).valueOf());
41+
XNorm = X = math.concat(math.ones([m, 1]).valueOf(), XNorm);
4442

4543
const ALPHA = 0.01;
4644
const ITERATIONS = 400;
@@ -68,7 +66,7 @@ function featureNormalize(X) {
6866
const sigma = getStdAsRowVector(X); // alternative: range
6967

7068
// n = features
71-
const n = getDimensionSize(X, 2);
69+
const n = X[0].length;
7270
for (let i = 0; i < n; i++) {
7371

7472
let featureVector = math.eval(`X[:, ${i + 1}]`, {
@@ -95,7 +93,7 @@ function featureNormalize(X) {
9593
}
9694

9795
function gradientDescentMulti(X, y, theta, ALPHA, ITERATIONS) {
98-
const m = getDimensionSize(y, 1);
96+
const m = y.length;
9997

10098
for (let i = 0; i < ITERATIONS; i++) {
10199
theta = math.eval(`theta - ALPHA / m * ((X * theta - y)' * X)'`, {

0 commit comments

Comments
 (0)