//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author  John Miller
 *  @version 1.1
 *  @date    Sun Jan 11 19:05:20 EST 2015
 *  @see     LICENSE (MIT style license file).
 */

// U N D E R   D E V E L O P M E N T
// FIX: needs improved optimization

package scalation.analytics

import scalation.linalgebra.{Matrix, MatrixD, VectorD}
import scalation.math.IntWithExp._
import scalation.math.DoubleWithExp._
import scalation.plot.Plot
import scalation.util.Error

import RegTechnique._

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegression` class supports exponential regression.  In this case,
 *  'x' is multi-dimensional [1, x_1, ... x_k].  Fit the parameter vector 'b' in the
 *  exponential regression equation
 *
 *      log (mu (x))  =  b dot x  =  b_0 + b_1 * x_1 + ... b_k * x_k
 *
 *  @see www.stat.uni-muenchen.de/~leiten/Lehre/Material/GLM_0708/chapterGLM.pdf
 *  @param x       the data/design matrix
 *  @param nonneg  whether to check that responses are nonnegative
 *  @param y       the response vector
 */
class ExpRegression (x: MatrixD, nonneg: Boolean, y: VectorD)
      extends Predictor with Error
{
    if (x.dim1 != y.dim) flaw ("constructor", "dimensions of x and y are incompatible")
    if (nonneg && ! y.isNonnegative) flaw ("constructor", "response vector y must be nonnegative")

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the log-likelihood of parameter vector 'b' for the exponential
     *  regression model (currently a stub returning 0.0).  The illustrative
     *  `ExpRegressionLLSketch` object below sketches one possible formulation.
     *  @param b  the parameter vector
     */
    def ll (b: VectorD): Double = 0.0

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the log-likelihood of the null model (intercept only), used for
     *  comparison with 'll' (currently a stub returning 0.0).
     *  @param b  the parameter vector
     */
    def ll_null (b: VectorD): Double = 0.0

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Train the predictor by fitting the parameter vector (b-vector) in the
     *  exponential regression equation
     *      log (mu (x))  =  b dot x  =  [b_0, ... b_k] dot [1, x_1, ... x_k]
     *  e.g., by maximizing the log-likelihood 'll'.
     */
    def train () { }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Retrain the predictor by fitting the parameter vector (b-vector) in the
     *  exponential regression equation using the new response vector 'yy'.
     *  @param yy  the new response vector
     */
    def train (yy: VectorD) { }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the fit (parameter vector b, quality of fit including rSquared).
     */
    def fit: Tuple4 [VectorD, Double, Double, Double] = null

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula y = exp (b dot z),
     *  since 'log (mu (x)) = b dot x', e.g., exp ((b_0, b_1, b_2) dot (1, z_1, z_2)).
     *  @param z  the new vector to predict
     */
    def predict (z: VectorD): Double = 0.0

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula y = exp (b dot zi)
     *  for each row zi of matrix z.
     *  @param z  the new matrix to predict
     */
    def predict (z: Matrix): VectorD = null

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform backward elimination to remove the least predictive variable
     *  from the model, returning the variable to eliminate, the new parameter
     *  vector, the new R-squared value and the new F statistic.
     */
    def backElim (): Tuple4 [Int, VectorD, Double, Double] = null

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Variance Inflation Factor (VIF) for each variable to test
     *  for multi-collinearity by regressing xj against the rest of the variables.
     *  A VIF over 10 indicates that over 90% of the variance of xj can be predicted
     *  from the other variables, so xj is a candidate for removal from the model
     *  (see the 'VIFSketch' object at the end of this file for the underlying
     *  VIF = 1 / (1 - R^2) relationship).
     */
    def vif: VectorD = null

} // ExpRegression class
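
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegressionLLSketch` object is an illustrative sketch only: it is not
 *  part of the `ExpRegression` class above, and its candidate parameter vectors
 *  are hypothetical.  It assumes the responses are exponentially distributed
 *  with mean 'exp (b dot x_i)', giving the log-likelihood
 *
 *      sum_i [ -(b dot x_i) - y_i * exp (-(b dot x_i)) ]
 *
 *  which is one possible criterion for 'll' / 'train' to maximize.
 */
object ExpRegressionLLSketch extends App
{
    import scala.math.exp

    val x = new MatrixD ((5, 3), 1.0, 36.0, 66.0,               // same data as ExpRegressionTest
                                 1.0, 37.0, 68.0,
                                 1.0, 47.0, 64.0,
                                 1.0, 32.0, 53.0,
                                 1.0,  1.0, 101.0)
    val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0)

    /** Log-likelihood of parameter vector 'b' under the assumed exponential model. */
    def logLik (b: VectorD): Double =
    {
        var sum = 0.0
        for (i <- 0 until y.dim) {
            val bx = b dot x(i)                                 // linear predictor for row i
            sum += -bx - y(i) * exp (-bx)                       // exponential log-density with rate exp (-bx)
        } // for
        sum
    } // logLik

    val b1 = VectorD (6.5, 0.0, 0.0)                            // hypothetical intercept-only candidate
    val b2 = VectorD (7.0, -0.01, 0.0)                          // hypothetical candidate with a slope on x_1

    println ("logLik (b1) = " + logLik (b1))
    println ("logLik (b2) = " + logLik (b2))                    // the candidate with the higher value fits better

} // ExpRegressionLLSketch object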

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegressionTest` object tests the `ExpRegression` class using the
 *  following regression equation.
 *
 *      log (mu (x))  =  b dot x  =  b_0 + b_1*x_1 + b_2*x_2
 *
 */
object ExpRegressionTest extends App
{
    val x = new MatrixD ((5, 3), 1.0, 36.0, 66.0,               // 5-by-3 matrix
                                 1.0, 37.0, 68.0,
                                 1.0, 47.0, 64.0,
                                 1.0, 32.0, 53.0,
                                 1.0,  1.0, 101.0)
    val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0)        // response vector
    val z = VectorD (1.0, 20.0, 80.0)                           // new vector to predict

    println ("x = " + x)
    println ("y = " + y)

    val erg = new ExpRegression (x, true, y)
    erg.train ()
    println ("fit = " + erg.fit)

    val yp = erg.predict (z)
    println ("predict (" + z + ") = " + yp)

} // ExpRegressionTest object
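
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VIFSketch` object is a small illustrative sketch (hypothetical, not part
 *  of the `ExpRegression` class above) of the rule of thumb behind 'vif': since
 *  VIF_j = 1 / (1 - R_j^2), where R_j^2 comes from regressing x_j on the other
 *  variables, an R_j^2 of 0.9 (90% of the variance explained) gives a VIF of 10.
 */
object VIFSketch extends App
{
    def vifOf (rSq: Double): Double = 1.0 / (1.0 - rSq)         // Variance Inflation Factor from R^2

    for (rSq <- Seq (0.5, 0.8, 0.9, 0.95)) {
        println ("R^2 = " + rSq + " -> VIF = " + vifOf (rSq))
    } // for

} // VIFSketch object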