@article{JSSv007i01,
  author   = {Weisberg, Sanford},
  title    = {Dimension Reduction Regression in {R}},
  journal  = {Journal of Statistical Software},
  volume   = {7},
  number   = {1},
  pages    = {1--22},
  year     = {2002},
  doi      = {10.18637/jss.v007.i01},
  url      = {https://www.jstatsoft.org/index.php/jss/article/view/v007i01},
  abstract = {Regression is the study of the dependence of a response variable $y$ on a collection of $p$ predictors collected in $x$. In dimension reduction regression, we seek to find a few linear combinations $\beta_1 x, \ldots, \beta_d x$, such that all the information about the regression is contained in these linear combinations. If $d$ is very small, perhaps one or two, then the regression problem can be summarized using simple graphics; for example, for $d = 1$, the plot of $y$ versus $\beta_1 x$ contains all the regression information. When $d = 2$, a 3D plot contains all the information. Several methods for estimating $d$ and relevant functions of $\beta_1, \ldots, \beta_d$ have been suggested in the literature. In this paper, we describe an {R} package for three important dimension reduction methods: sliced inverse regression or sir, sliced average variance estimates, or save, and principal Hessian directions, or phd. The package is very general and flexible, and can be easily extended to include other methods of dimension reduction. It includes tests and estimates of the dimension $d$, estimates of the relevant information including $\beta_1, \ldots, \beta_d$, and some useful graphical summaries as well.},
}