@misc{cogprints571,
  title    = {How to Make a Low-Dimensional Representation Suitable for Diverse Tasks},
  author   = {Nathan Intrator and Shimon Edelman},
  year     = {1996},
  url      = {http://cogprints.org/571/},
  abstract = {We consider training classifiers for multiple tasks as a method for improving generalization and obtaining a better low-dimensional representation. To that end, we introduce a hybrid training methodology for MLP networks; the utility of the hidden-unit representation is assessed by embedding it into a 2D space using multidimensional scaling. The proposed methodology is tested on a highly nonlinear image classification task.}
}