/**
 * The [R Squared](https://en.wikipedia.org/wiki/Coefficient_of_determination)
 * (coefficient of determination) measures how well a function `f`
 * predicts a dataset: it is 1 minus the ratio of the residual sum of
 * squares (the squared differences between prediction and actual value)
 * to the total sum of squares (the variance of the data around its
 * mean). A value of 1 indicates a perfect fit.
 *
 * @param {Array<Array<number>>} x input data: this should be doubly-nested
 * @param {Function} func function called on `[i][0]` values within the dataset
 * @returns {number} r-squared value
 * @example
 * var samples = [[0, 0], [1, 1]];
 * var regressionLine = linearRegressionLine(linearRegression(samples));
 * rSquared(samples, regressionLine); // = 1 this line is a perfect fit
 */
function rSquared(x, func) {
    // With fewer than two points there is no variance to explain,
    // so any function trivially "fits".
    if (x.length < 2) {
        return 1;
    }

    // Compute the average y value for the actual
    // data set in order to compute the
    // _total sum of squares_
    let sum = 0;
    for (let i = 0; i < x.length; i++) {
        sum += x[i][1];
    }
    const average = sum / x.length;

    // Compute the total sum of squares - the
    // squared difference between each point
    // and the average of all points.
    let sumOfSquares = 0;
    for (let j = 0; j < x.length; j++) {
        const deviation = average - x[j][1];
        sumOfSquares += deviation * deviation;
    }

    // Finally estimate the error: the squared
    // difference between the estimate and the actual data
    // value at each point.
    let err = 0;
    for (let k = 0; k < x.length; k++) {
        const residual = x[k][1] - func(x[k][0]);
        err += residual * residual;
    }

    // Guard against division by zero when every y value is identical
    // (zero total variance): a model with zero error is a perfect
    // fit (1); otherwise it explains none of the variance (0).
    // The original formula would yield NaN (0/0) or -Infinity here.
    if (sumOfSquares === 0) {
        return err === 0 ? 1 : 0;
    }

    // As the error grows larger, its ratio to the
    // sum of squares increases and the r squared
    // value grows lower.
    return 1 - err / sumOfSquares;
}

export default rSquared;